Compare commits

2011.12.15 ... 2013.02.02 (586 commits)
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 085c8b75a6 | ||
|  | dbf2ba3d61 | ||
|  | b47bbac393 | ||
|  | 229cac754a | ||
|  | 0e33684194 | ||
|  | 9e982f9e4e | ||
|  | c7a725cfad | ||
|  | 450a30cae8 | ||
|  | 9cd5e4fce8 | ||
|  | edba5137b8 | ||
|  | 233a22960a | ||
|  | 3b024e17af | ||
|  | a32b573ccb | ||
|  | ec71c13ab8 | ||
|  | f0bad2b026 | ||
|  | 25580f3251 | ||
|  | da4de959df | ||
|  | d0d51a8afa | ||
|  | c67598c3e1 | ||
|  | 811d253bc2 | ||
|  | c3a1642ead | ||
|  | ccf65f9dee | ||
|  | b954070d70 | ||
|  | 30e9f4496b | ||
|  | 271d3fbdaa | ||
|  | 6df40dcbe0 | ||
|  | 97f194c1fb | ||
|  | 4da769ccca | ||
|  | 253d96f2e2 | ||
|  | bbc3e2753a | ||
|  | 67353612ba | ||
|  | bffbd5f038 | ||
|  | d8bbf2018e | ||
|  | 187f491ad2 | ||
|  | 335959e778 | ||
|  | 3b83bf8f6a | ||
|  | 51719893bf | ||
|  | 1841f65e64 | ||
|  | bb28998920 | ||
|  | fbc5f99db9 | ||
|  | ca0a0bbeec | ||
|  | 6119f78cb9 | ||
|  | 539679c7f9 | ||
|  | b642cd44c1 | ||
|  | fffec3b9d9 | ||
|  | 3446dfb7cb | ||
|  | db16276b7c | ||
|  | 629fcdd135 | ||
|  | 64ce2aada8 | ||
|  | 565f751967 | ||
|  | 6017964580 | ||
|  | 1d16b0c3fe | ||
|  | 7851b37993 | ||
|  | d81edc573e | ||
|  | ef0c8d5f9f | ||
|  | db30f02b50 | ||
|  | 4ba7262467 | ||
|  | 67d0c25eab | ||
|  | 09f9552b40 | ||
|  | 142d38f776 | ||
|  | 6dd3471900 | ||
|  | 280d67896a | ||
|  | 510e6f6dc1 | ||
|  | 712e86b999 | ||
|  | 74fdba620d | ||
|  | dc1c479a6f | ||
|  | 119d536e07 | ||
|  | fa1bf9c653 | ||
|  | 814eed0ea1 | ||
|  | 0aa3068e9e | ||
|  | db2d6124b1 | ||
|  | 039dc61bd2 | ||
|  | 4b879984ea | ||
|  | 55e286ba55 | ||
|  | 9450bfa26e | ||
|  | 18be482a6f | ||
|  | ca6710ee41 | ||
|  | 9314810243 | ||
|  | 7717ae19fa | ||
|  | 32635ec685 | ||
|  | caec7618a1 | ||
|  | 7e7ab2815c | ||
|  | d7744f2219 | ||
|  | 7161829de5 | ||
|  | 991ba7fae3 | ||
|  | a7539296ce | ||
|  | 258d5850c9 | ||
|  | 20759b340a | ||
|  | 8e5f761870 | ||
|  | 26714799c9 | ||
|  | 5e9d042d8f | ||
|  | 9cf98a2bcc | ||
|  | f5ebb61495 | ||
|  | 431d88dd31 | ||
|  | 876f1a86af | ||
|  | 01951dda7a | ||
|  | 6e3dba168b | ||
|  | d851e895d5 | ||
|  | b962b76f43 | ||
|  | 26cf040827 | ||
|  | 8e241d1a1a | ||
|  | 3a648b209c | ||
|  | c80f0a417a | ||
|  | 4fcca4bb18 | ||
|  | 511eda8eda | ||
|  | 5f9551719c | ||
|  | d830b7c297 | ||
|  | 1c256f7047 | ||
|  | a34dd63beb | ||
|  | 4aeae91f86 | ||
|  | c073e35b1e | ||
|  | 5c892b0ba9 | ||
|  | 6985325e01 | ||
|  | 911ee27e83 | ||
|  | 2069acc6a4 | ||
|  | 278986ea0f | ||
|  | 6535e9511f | ||
|  | 60c7520a51 | ||
|  | deb594a9a0 | ||
|  | e314ba675b | ||
|  | 0214ce7c75 | ||
|  | 95fedbf86b | ||
|  | b7769a05ec | ||
|  | 067f6a3536 | ||
|  | 8cad53e84c | ||
|  | d5ed35b664 | ||
|  | f427df17ab | ||
|  | 4e38899e97 | ||
|  | cb6ff87fbb | ||
|  | 0deac3a2d8 | ||
|  | 92e3e18a1d | ||
|  | 3bb6165927 | ||
|  | d0d4f277da | ||
|  | 99b0a1292b | ||
|  | dc23886a77 | ||
|  | b7298b6e2a | ||
|  | 3e6c3f52a9 | ||
|  | 0c0074328b | ||
|  | f0648fc18c | ||
|  | a7c0f8602e | ||
|  | 21a9c6aaac | ||
|  | 162e3c5261 | ||
|  | 6b3aef80ce | ||
|  | 77c4beab8a | ||
|  | 1a2c3c0f3e | ||
|  | 0eaf520d77 | ||
|  | 056d857571 | ||
|  | 69a3883199 | ||
|  | 0dcfb234ed | ||
|  | 43e8fafd49 | ||
|  | 314d506b96 | ||
|  | af42895612 | ||
|  | bfa6389b74 | ||
|  | 9b14f51a3e | ||
|  | f4bfd65ff2 | ||
|  | 3cc687d486 | ||
|  | cdb3076445 | ||
|  | 8a2f13c304 | ||
|  | 77bd7968ea | ||
|  | 993693aa79 | ||
|  | ce4be3a91d | ||
|  | 937021133f | ||
|  | f7b111b7d1 | ||
|  | 80d3177e5c | ||
|  | 5e5ddcfbcf | ||
|  | 5910e210f4 | ||
|  | b375c8b946 | ||
|  | 88f6c78b02 | ||
|  | 4096b60948 | ||
|  | 2ab1c5ed1a | ||
|  | 0b40544f29 | ||
|  | 187da2c093 | ||
|  | 9a2cf56d51 | ||
|  | 0be41ec241 | ||
|  | f1171f7c2d | ||
|  | 28ca6b5afa | ||
|  | bec102a843 | ||
|  | 8f6f40d991 | ||
|  | e2a8ff24a9 | ||
|  | 8588a86f9e | ||
|  | 5cb9c3129b | ||
|  | 4cc3d07426 | ||
|  | 5d01a64719 | ||
|  | a276e06080 | ||
|  | fd5ff02042 | ||
|  | 2b5b2cb84c | ||
|  | ca6849e65d | ||
|  | 1535ac2ae9 | ||
|  | a4680a590f | ||
|  | fedb6816cd | ||
|  | f6152b4b64 | ||
|  | 4b618047ce | ||
|  | 2c6945be30 | ||
|  | 9a6f4429a0 | ||
|  | 4c21c56bfe | ||
|  | 2a298b72eb | ||
|  | 55c0539872 | ||
|  | 9789a05c20 | ||
|  | d050de77f9 | ||
|  | 95eb771dcd | ||
|  | 4fb1acc212 | ||
|  | d3d3199870 | ||
|  | 1ca63e3ae3 | ||
|  | 59ce201915 | ||
|  | 8d5d3a5d00 | ||
|  | 37c8fd4842 | ||
|  | 3c6ffbaedb | ||
|  | c7287a3caf | ||
|  | 5a304a7637 | ||
|  | 4c1d273e88 | ||
|  | a9d2f7e894 | ||
|  | 682407f2d5 | ||
|  | bdff345529 | ||
|  | 23109d6a9c | ||
|  | 4bb028f48e | ||
|  | fec89790b1 | ||
|  | a5741a3f5e | ||
|  | 863baa16ec | ||
|  | c7214f9a6f | ||
|  | 8fd3afd56c | ||
|  | f9b2f2b955 | ||
|  | 633b4a5ff6 | ||
|  | b4cd069d5e | ||
|  | 0f8d03f81c | ||
|  | 077174f4ed | ||
|  | e387eb5aba | ||
|  | 4083bf81a0 | ||
|  | 796173d08b | ||
|  | e575b6821e | ||
|  | d78be7e331 | ||
|  | 15c8d83358 | ||
|  | e91d2338d8 | ||
|  | 4b235346d6 | ||
|  | ad348291bb | ||
|  | 2f1765c4ea | ||
|  | 3c5b63d2d6 | ||
|  | cc51a7d4e0 | ||
|  | 8af4ed7b4f | ||
|  | 8192ebe1f8 | ||
|  | 20ba04267c | ||
|  | 743b28ce11 | ||
|  | caaa47d372 | ||
|  | 10f100ac8a | ||
|  | 8176041605 | ||
|  | 87bec4c715 | ||
|  | 190e8e27d8 | ||
|  | 4efe62a016 | ||
|  | c64de2c980 | ||
|  | 6ad98fb3fd | ||
|  | b08e09c370 | ||
|  | cdab8aa389 | ||
|  | 3cd69a54b2 | ||
|  | 627dcfff39 | ||
|  | df5cff3751 | ||
|  | 79ae0a06d5 | ||
|  | 2d2fa229ec | ||
|  | 5a59fd6392 | ||
|  | 0eb0faa26f | ||
|  | 32761d863c | ||
|  | 799c076384 | ||
|  | f1cb5bcad2 | ||
|  | 9e8056d5a7 | ||
|  | c6f3620859 | ||
|  | 59ae15a507 | ||
|  | 40b35b4aa6 | ||
|  | be0f77d075 | ||
|  | 0f00efed4c | ||
|  | e6137fd61d | ||
|  | 8cd10ac4ef | ||
|  | 64a57846d3 | ||
|  | 72f976701a | ||
|  | 5bd9cc7a6a | ||
|  | f660c89d51 | ||
|  | 73dce4b2e4 | ||
|  | 9f37a95941 | ||
|  | a130bc6d02 | ||
|  | 348d0a7a18 | ||
|  | 03f9daab34 | ||
|  | a8156c1d2e | ||
|  | 3e669f369f | ||
|  | da779b4924 | ||
|  | 89fb51dd2d | ||
|  | 01ba00ca42 | ||
|  | e08bee320e | ||
|  | 96731798db | ||
|  | c116339ddb | ||
|  | e643e2c6b7 | ||
|  | c63cc10ffa | ||
|  | dae7c920f6 | ||
|  | f462df021a | ||
|  | 1a84d8675b | ||
|  | 18ea0cefc3 | ||
|  | c806f804d8 | ||
|  | 03c5b0fbd4 | ||
|  | 95649b3936 | ||
|  | 3aeb78ea4e | ||
|  | dd109dee8e | ||
|  | b514df2034 | ||
|  | 0969bdd305 | ||
|  | 1a9c655e3b | ||
|  | 88db5ef279 | ||
|  | f8d8b39bba | ||
|  | dcd60025f8 | ||
|  | 7e4674830e | ||
|  | 9ce5d9ee75 | ||
|  | b49e75ff9a | ||
|  | abe7a3ac2a | ||
|  | 717b1f72ed | ||
|  | 26396311b5 | ||
|  | dffe658bac | ||
|  | 33d94a6c99 | ||
|  | 4d47921c9e | ||
|  | d94adc2638 | ||
|  | 5c5d06d31d | ||
|  | cc872b68a8 | ||
|  | 17cb14a336 | ||
|  | 877f4c45d3 | ||
|  | 02531431f2 | ||
|  | e02066e7ff | ||
|  | c9128b353d | ||
|  | e7c6f1a2dc | ||
|  | 1a911e60a4 | ||
|  | 46cbda0be4 | ||
|  | fa59f4b6a9 | ||
|  | 4a702f3819 | ||
|  | 6bac102a4d | ||
|  | 958a22b7cf | ||
|  | 97cd3afc75 | ||
|  | aa2a94ed81 | ||
|  | c7032546f1 | ||
|  | 56781d3d2e | ||
|  | feb22fe5fe | ||
|  | d8dddb7c02 | ||
|  | 4408d996fb | ||
|  | ed7516c69d | ||
|  | 89af8e9d32 | ||
|  | 36a9c0b5ff | ||
|  | 9fb3bfb45a | ||
|  | d479e34043 | ||
|  | 240089e5df | ||
|  | 1c469a9480 | ||
|  | 71f36332dd | ||
|  | 8179d2ba74 | ||
|  | df4bad3245 | ||
|  | a7b5c8d6a8 | ||
|  | 92b91c1878 | ||
|  | 7ec1a206ea | ||
|  | 51937c0869 | ||
|  | 6b50761222 | ||
|  | 6571408dc6 | ||
|  | b6fab35b9f | ||
|  | baec15387c | ||
|  | 297d7fd9c0 | ||
|  | 5002aea371 | ||
|  | 5f7ad21633 | ||
|  | 089d47f8d5 | ||
|  | 74033a662d | ||
|  | fdef722fa1 | ||
|  | 110d4f4c91 | ||
|  | 0526e4f55a | ||
|  | 39973a0236 | ||
|  | 5d40a470a2 | ||
|  | 4cc391461a | ||
|  | bf95333e5e | ||
|  | b7a34316d2 | ||
|  | 74e453bdea | ||
|  | 156a59e7a9 | ||
|  | aeca861f22 | ||
|  | 42cb53fcfa | ||
|  | fe4d68e196 | ||
|  | 25b7fd9c01 | ||
|  | e79e8b7dc4 | ||
|  | 965a8b2bc4 | ||
|  | a8ac2f8664 | ||
|  | fb0e99b884 | ||
|  | 9c6e9a4532 | ||
|  | 67af74992e | ||
|  | 103c508ffa | ||
|  | 2876773381 | ||
|  | f06eaa873e | ||
|  | ece34e8951 | ||
|  | 2262a32dd7 | ||
|  | c6c0e23a32 | ||
|  | 02b324a23d | ||
|  | b8005afc20 | ||
|  | 073522bc6c | ||
|  | 9248cb0549 | ||
|  | 6b41b61119 | ||
|  | 591bbe9c90 | ||
|  | fc7376016c | ||
|  | 97a37c2319 | ||
|  | 3afed78a6a | ||
|  | 4279a0ca98 | ||
|  | edcc7d2dd3 | ||
|  | 7f60b5aa40 | ||
|  | 65adb79fb6 | ||
|  | aeeb29a356 | ||
|  | 902b2a0a45 | ||
|  | 6d9c22cd26 | ||
|  | 729baf58b2 | ||
|  | 4c9afeca34 | ||
|  | 6da7877bf5 | ||
|  | b4e5de51ec | ||
|  | a4b5f22554 | ||
|  | ff08984246 | ||
|  | 137c5803c3 | ||
|  | 3eec021a1f | ||
|  | 5a33b73309 | ||
|  | 0b4e98490b | ||
|  | 80a846e119 | ||
|  | 434d60cd95 | ||
|  | efe8902f0b | ||
|  | 44fb345437 | ||
|  | 9993976ae4 | ||
|  | b387fb0385 | ||
|  | 10daa766a1 | ||
|  | 7b107eea51 | ||
|  | 646b885cbf | ||
|  | 0bfd0b598a | ||
|  | fd873c69a4 | ||
|  | d64db7409b | ||
|  | 27fec0e3bd | ||
|  | 65f934dc93 | ||
|  | d51d784f85 | ||
|  | aa85963987 | ||
|  | 413575f7a5 | ||
|  | b7b4796bf2 | ||
|  | fcbc8c830e | ||
|  | f48ce130c7 | ||
|  | 13e69f546c | ||
|  | 63ec7b7479 | ||
|  | 7b6d7001d8 | ||
|  | 39ce6e79e7 | ||
|  | 5c961d89df | ||
|  | 3c4d6c9eba | ||
|  | 349e2e3e21 | ||
|  | 551fa9dfbf | ||
|  | ce3674430b | ||
|  | 5cdfaeb37b | ||
|  | 38612b4edc | ||
|  | 6c5b442a9b | ||
|  | 5a5523698d | ||
|  | 05a2c206be | ||
|  | 8ca21983d8 | ||
|  | 20326b8b1b | ||
|  | 5d534e2fe6 | ||
|  | 234e230c87 | ||
|  | 34ae0f9d20 | ||
|  | df09e5f9e1 | ||
|  | 3af2f7656c | ||
|  | 74e716bb64 | ||
|  | 85f76ac90b | ||
|  | 7f36e39676 | ||
|  | ebe3f89ea4 | ||
|  | b5de8af234 | ||
|  | eb817499b0 | ||
|  | e2af9232b2 | ||
|  | 9ca667065e | ||
|  | ae16f68f4a | ||
|  | 3cd98c7894 | ||
|  | 2866e68838 | ||
|  | be8786a6a4 | ||
|  | 0e841bdc54 | ||
|  | 225dceb046 | ||
|  | b0d4f95899 | ||
|  | d443aca863 | ||
|  | 2ebc6e6a92 | ||
|  | f2ad10a97d | ||
|  | ea46fe2dd4 | ||
|  | 202e76cfb0 | ||
|  | 3a68d7b467 | ||
|  | 795cc5059a | ||
|  | 5dc846fad0 | ||
|  | d5c4c4c10e | ||
|  | 1ac3e3315e | ||
|  | 0e4dc2fc74 | ||
|  | 9bb8dc8e42 | ||
|  | 154b55dae3 | ||
|  | 6de7ef9b8d | ||
|  | 392105265c | ||
|  | 51661d8600 | ||
|  | b5809a68bf | ||
|  | 7733d455c8 | ||
|  | 0a98b09bc2 | ||
|  | 302efc19ea | ||
|  | 55a1fa8a56 | ||
|  | dce1088450 | ||
|  | a171dbfc27 | ||
|  | 11a141dec9 | ||
|  | 818282710b | ||
|  | 7a7c093ab0 | ||
|  | ce7b2a40d0 | ||
|  | cfcec69331 | ||
|  | 91645066e2 | ||
|  | dee5d76923 | ||
|  | 363a4e1114 | ||
|  | ef0c08cdfe | ||
|  | 3210735c49 | ||
|  | aab4fca422 | ||
|  | 891d7f2329 | ||
|  | b24676ce88 | ||
|  | cca4828ac9 | ||
|  | bae611f216 | ||
|  | d4e16d3e97 | ||
|  | 65dc7d0272 | ||
|  | 5404179338 | ||
|  | 7df97fb59f | ||
|  | 3187e42a23 | ||
|  | f1927d71e4 | ||
|  | eeeb4daabc | ||
|  | 3c4fc580bb | ||
|  | 17f3c40a31 | ||
|  | 505ed3088f | ||
|  | 0b976545c7 | ||
|  | a047951477 | ||
|  | 6ab92c8b62 | ||
|  | f36cd07685 | ||
|  | 668d975039 | ||
|  | 9ab3406ddb | ||
|  | 1b91a2e2cf | ||
|  | 2c288bda42 | ||
|  | 0b8c922da9 | ||
|  | 3fe294e4ef | ||
|  | 921a145592 | ||
|  | 0c24eed73a | ||
|  | 29ce2c1201 | ||
|  | 532c74ae86 | ||
|  | 9beb5af82e | ||
|  | 9e6dd23876 | ||
|  | 7a8501e307 | ||
|  | 781cc523af | ||
|  | c6f45d4314 | ||
|  | d11d05d07a | ||
|  | e179aadfdf | ||
|  | d6a9615347 | ||
|  | c6306eb798 | ||
|  | bcfde70d73 | ||
|  | 53e893615d | ||
|  | 303692b5ed | ||
|  | 58ca755f40 | ||
|  | 770234afa2 | ||
|  | d77c3dfd02 | ||
|  | c23d8a74dc | ||
|  | 74a5ff5f43 | ||
|  | 071940680f | ||
|  | 69d3b2d824 | ||
|  | d891ff9fd9 | ||
|  | 6af22cf0ef | ||
|  | fff24d5e35 | ||
|  | ceba827e9a | ||
|  | a0432a1e80 | ||
|  | cfcf32d038 | ||
|  | a67bdc34fa | ||
|  | b3a653c245 | ||
|  | 4a34b7252e | ||
|  | 7e45ec57a8 | ||
|  | afbaa80b8b | ||
|  | 115d243428 | ||
|  | 7151f63a5f | ||
|  | 597e7b1805 | ||
|  | 2934c2ce43 | ||
|  | 0f6e296a8e | ||
|  | 9c228928b6 | ||
|  | ff3a2b8eab | ||
|  | c4105fa035 | ||
|  | 871dbd3c92 | ||
|  | c9ed14e6d6 | ||
|  | 1ad85e5061 | ||
|  | 09fbc6c952 | ||
|  | 895ec266bb | ||
|  | d85448f3bb | ||
|  | 99d46e8c27 | ||
|  | 4afdff39d7 | ||
|  | 661a807c65 | ||
|  | 6d58c4546e | ||
|  | 38ffbc0222 | ||
|  | fefb166c52 | ||
|  | dcb3c22e0b | ||
|  | 47a53c9e46 | ||
|  | 1413cd87eb | ||
|  | c92e184f75 | ||
|  | 3906e6ce60 | ||
|  | c7d3c3db0d | ||
|  | d6639d05c2 | ||
|  | 633cf7cbad | ||
|  | a5647b79ce | ||

**.gitignore** (16 changes, vendored)

							| @@ -1,3 +1,19 @@ | ||||
| *.pyc | ||||
| *.pyo | ||||
| *~ | ||||
| *.DS_Store | ||||
| wine-py2exe/ | ||||
| py2exe.log | ||||
| *.kate-swp | ||||
| build/ | ||||
| dist/ | ||||
| MANIFEST | ||||
| README.txt | ||||
| youtube-dl.1 | ||||
| youtube-dl.bash-completion | ||||
| youtube-dl | ||||
| youtube-dl.exe | ||||
| youtube-dl.tar.gz | ||||
| .coverage | ||||
| cover/ | ||||
| updates_key.pem | ||||
|   | ||||
							
								
								
									
**.travis.yml** (14 changes, new file)

							| @@ -0,0 +1,14 @@ | ||||
| language: python | ||||
| python: | ||||
|   - "2.6" | ||||
|   - "2.7" | ||||
|   - "3.3" | ||||
| script: nosetests test --verbose | ||||
| notifications: | ||||
|   email: | ||||
|     - filippo.valsorda@gmail.com | ||||
|     - phihag@phihag.de | ||||
| #  irc: | ||||
| #    channels: | ||||
| #      - "irc.freenode.org#youtube-dl" | ||||
| #    skip_join: true | ||||
							
								
								
									
**CHANGELOG** (14 changes, new file)

							| @@ -0,0 +1,14 @@ | ||||
| 2013.01.02  Codename: GIULIA | ||||
|  | ||||
|     * Add support for ComedyCentral clips <nto> | ||||
|     * Corrected Vimeo description fetching <Nick Daniels> | ||||
|     * Added the --no-post-overwrites argument <Barbu Paul - Gheorghe> | ||||
|     * --verbose offers more environment info | ||||
|     * New info_dict field: uploader_id | ||||
|     * New updates system, with signature checking | ||||
|     * New IEs: NBA, JustinTV, FunnyOrDie, TweetReel, Steam, Ustream | ||||
|     * Fixed IEs: BlipTv | ||||
|     * Fixed for Python 3 IEs: Xvideo, Youku, XNXX, Dailymotion, Vimeo, InfoQ | ||||
|     * Simplified IEs and test code | ||||
|     * Various (Python 3 and other) fixes | ||||
|     * Revamped and expanded tests | ||||
| @@ -1 +1 @@ | ||||
| 2011.12.15 | ||||
| 2012.12.99 | ||||
|   | ||||
							
								
								
									
**LICENSE** (24 changes, new file)

							| @@ -0,0 +1,24 @@ | ||||
| This is free and unencumbered software released into the public domain. | ||||
|  | ||||
| Anyone is free to copy, modify, publish, use, compile, sell, or | ||||
| distribute this software, either in source code form or as a compiled | ||||
| binary, for any purpose, commercial or non-commercial, and by any | ||||
| means. | ||||
|  | ||||
| In jurisdictions that recognize copyright laws, the author or authors | ||||
| of this software dedicate any and all copyright interest in the | ||||
| software to the public domain. We make this dedication for the benefit | ||||
| of the public at large and to the detriment of our heirs and | ||||
| successors. We intend this dedication to be an overt act of | ||||
| relinquishment in perpetuity of all present and future rights to this | ||||
| software under copyright law. | ||||
|  | ||||
| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, | ||||
| EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | ||||
| MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. | ||||
| IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR | ||||
| OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, | ||||
| ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR | ||||
| OTHER DEALINGS IN THE SOFTWARE. | ||||
|  | ||||
| For more information, please refer to <http://unlicense.org/> | ||||
							
								
								
									
**MANIFEST.in** (3 changes, new file)

							| @@ -0,0 +1,3 @@ | ||||
| include README.md | ||||
| include test/*.py | ||||
| include test/*.json | ||||
							
								
								
									
**Makefile** (74 changes)

							| @@ -1,23 +1,61 @@ | ||||
| default: update | ||||
| all: youtube-dl README.md README.txt youtube-dl.1 youtube-dl.bash-completion | ||||
|  | ||||
| update: compile update-readme update-latest | ||||
| clean: | ||||
| 	rm -rf youtube-dl youtube-dl.exe youtube-dl.1 youtube-dl.bash-completion README.txt MANIFEST build/ dist/ .coverage cover/ youtube-dl.tar.gz | ||||
|  | ||||
| update-latest: | ||||
| 	./youtube-dl.dev --version > LATEST_VERSION | ||||
| PREFIX=/usr/local | ||||
| BINDIR=$(PREFIX)/bin | ||||
| MANDIR=$(PREFIX)/man | ||||
| SYSCONFDIR=/etc | ||||
| PYTHON=/usr/bin/env python | ||||
|  | ||||
| update-readme: | ||||
| 	@options=$$(COLUMNS=80 ./youtube-dl.dev --help | sed -e '1,/.*General Options.*/ d' -e 's/^\W\{2\}\(\w\)/### \1/') && \ | ||||
| 		header=$$(sed -e '/.*## OPTIONS/,$$ d' README.md) && \ | ||||
| 		footer=$$(sed -e '1,/.*## FAQ/ d' README.md) && \ | ||||
| 		echo "$${header}" > README.md && \ | ||||
| 		echo >> README.md && \ | ||||
| 		echo '## OPTIONS' >> README.md && \ | ||||
| 		echo "$${options}" >> README.md&& \ | ||||
| 		echo >> README.md && \ | ||||
| 		echo '## FAQ' >> README.md && \ | ||||
| 		echo "$${footer}" >> README.md | ||||
| install: youtube-dl youtube-dl.1 youtube-dl.bash-completion | ||||
| 	install -d $(DESTDIR)$(BINDIR) | ||||
| 	install -m 755 youtube-dl $(DESTDIR)$(BINDIR) | ||||
| 	install -d $(DESTDIR)$(MANDIR)/man1 | ||||
| 	install -m 644 youtube-dl.1 $(DESTDIR)$(MANDIR)/man1 | ||||
| 	install -d $(DESTDIR)$(SYSCONFDIR)/bash_completion.d | ||||
| 	install -m 644 youtube-dl.bash-completion $(DESTDIR)$(SYSCONFDIR)/bash_completion.d/youtube-dl | ||||
|  | ||||
| compile: | ||||
| 	cp youtube_dl/__init__.py youtube-dl | ||||
| test: | ||||
| 	#nosetests --with-coverage --cover-package=youtube_dl --cover-html --verbose --processes 4 test | ||||
| 	nosetests --verbose test | ||||
|  | ||||
| .PHONY: default compile update update-latest update-readme | ||||
| tar: youtube-dl.tar.gz | ||||
|  | ||||
| .PHONY: all clean install test tar | ||||
|  | ||||
| youtube-dl: youtube_dl/*.py | ||||
| 	zip --quiet youtube-dl youtube_dl/*.py | ||||
| 	zip --quiet --junk-paths youtube-dl youtube_dl/__main__.py | ||||
| 	echo '#!$(PYTHON)' > youtube-dl | ||||
| 	cat youtube-dl.zip >> youtube-dl | ||||
| 	rm youtube-dl.zip | ||||
| 	chmod a+x youtube-dl | ||||
|  | ||||
| README.md: youtube_dl/*.py | ||||
| 	COLUMNS=80 python -m youtube_dl --help | python devscripts/make_readme.py | ||||
|  | ||||
| README.txt: README.md | ||||
| 	pandoc -f markdown -t plain README.md -o README.txt | ||||
|  | ||||
| youtube-dl.1: README.md | ||||
| 	pandoc -s -f markdown -t man README.md -o youtube-dl.1 | ||||
|  | ||||
| youtube-dl.bash-completion: youtube_dl/*.py devscripts/bash-completion.in | ||||
| 	python devscripts/bash-completion.py | ||||
|  | ||||
| youtube-dl.tar.gz: youtube-dl README.md README.txt youtube-dl.1 youtube-dl.bash-completion | ||||
| 	@tar -czf youtube-dl.tar.gz --transform "s|^|youtube-dl/|" --owner 0 --group 0 \ | ||||
| 		--exclude '*.DS_Store' \ | ||||
| 		--exclude '*.kate-swp' \ | ||||
| 		--exclude '*.pyc' \ | ||||
| 		--exclude '*.pyo' \ | ||||
| 		--exclude '*~' \ | ||||
| 		--exclude '__pycache' \ | ||||
| 		--exclude '.git' \ | ||||
| 		-- \ | ||||
| 		bin devscripts test youtube_dl \ | ||||
| 		CHANGELOG LICENSE README.md README.txt \ | ||||
| 		Makefile MANIFEST.in youtube-dl.1 youtube-dl.bash-completion setup.py \ | ||||
| 		youtube-dl | ||||
|   | ||||
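
The new `youtube-dl` target above builds the tool as a shebang-prefixed executable zip (the FAQ entry further down, "What is this binary file?", refers to this packaging). As a rough illustration of the same idea, and not the project's actual build step, a hypothetical Python helper could look like this:

```python
# Sketch of the executable-zip packaging the Makefile does with `zip` + `cat`:
# zip readers locate the archive from the end of the file, so a shebang line
# can be prepended and the result stays a valid zip that Python can execute
# (it runs __main__.py from the archive root).
import glob
import os
import stat
import zipfile

def build_executable_zip(out_path="youtube-dl", package="youtube_dl",
                         python="/usr/bin/env python"):
    zip_path = out_path + ".zip"
    with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zf:
        for src in glob.glob(os.path.join(package, "*.py")):
            zf.write(src)                               # keep youtube_dl/ paths
        zf.write(os.path.join(package, "__main__.py"),
                 arcname="__main__.py")                 # entry point at the root
    with open(out_path, "wb") as out:
        out.write(("#!%s\n" % python).encode("ascii"))  # shebang first
        with open(zip_path, "rb") as data:
            out.write(data.read())                      # zip bytes appended
    os.remove(zip_path)
    os.chmod(out_path, os.stat(out_path).st_mode |
             stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

if __name__ == "__main__":
    build_executable_zip()
```
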
							
								
								
									
**README.md** (142 changes)

							| @@ -1,27 +1,36 @@ | ||||
| # youtube-dl | ||||
| % YOUTUBE-DL(1) | ||||
|  | ||||
| ## USAGE | ||||
| youtube-dl [options] url [url...] | ||||
| # NAME | ||||
| youtube-dl | ||||
|  | ||||
| ## DESCRIPTION | ||||
| # SYNOPSIS | ||||
| **youtube-dl** [OPTIONS] URL [URL...] | ||||
|  | ||||
| # DESCRIPTION | ||||
| **youtube-dl** is a small command-line program to download videos from | ||||
| YouTube.com and a few more sites. It requires the Python interpreter, version | ||||
| 2.x (x being at least 5), and it is not platform specific. It should work in | ||||
| your Unix box, in Windows or in Mac OS X. It is released to the public domain, | ||||
| 2.6, 2.7, or 3.3+, and it is not platform specific. It should work on | ||||
| your Unix box, on Windows or on Mac OS X. It is released to the public domain, | ||||
| which means you can modify it, redistribute it or use it however you like. | ||||
|  | ||||
| ## OPTIONS | ||||
| # OPTIONS | ||||
|     -h, --help               print this help text and exit | ||||
|     -v, --version            print program version and exit | ||||
|     --version                print program version and exit | ||||
|     -U, --update             update this program to latest version | ||||
|     -i, --ignore-errors      continue on download errors | ||||
|     -r, --rate-limit LIMIT   download rate limit (e.g. 50k or 44.6m) | ||||
|     -R, --retries RETRIES    number of retries (default is 10) | ||||
|     --buffer-size SIZE       size of download buffer (e.g. 1024 or 16k) (default | ||||
|                              is 1024) | ||||
|     --no-resize-buffer       do not automatically adjust the buffer size. By | ||||
|                              default, the buffer size is automatically resized | ||||
|                              from an initial value of SIZE. | ||||
|     --dump-user-agent        display the current browser identification | ||||
|     --user-agent UA          specify a custom user agent | ||||
|     --list-extractors        List all supported extractors and the URLs they | ||||
|                              would handle | ||||
|  | ||||
| ### Video Selection: | ||||
| ## Video Selection: | ||||
|     --playlist-start NUMBER  playlist video to start at (default is 1) | ||||
|     --playlist-end NUMBER    playlist video to end at (default is last) | ||||
|     --match-title REGEX      download only matching titles (regex or caseless | ||||
| @@ -29,18 +38,30 @@ which means you can modify it, redistribute it or use it however you like. | ||||
|     --reject-title REGEX     skip download for matching titles (regex or | ||||
|                              caseless sub-string) | ||||
|     --max-downloads NUMBER   Abort after downloading NUMBER files | ||||
|     --min-filesize SIZE      Do not download any videos smaller than SIZE (e.g. | ||||
|                              50k or 44.6m) | ||||
|     --max-filesize SIZE      Do not download any videos larger than SIZE (e.g. | ||||
|                              50k or 44.6m) | ||||
|  | ||||
| ### Filesystem Options: | ||||
| ## Filesystem Options: | ||||
|     -t, --title              use title in file name | ||||
|     -l, --literal            use literal title in file name | ||||
|     --id                     use video ID in file name | ||||
|     -l, --literal            [deprecated] alias of --title | ||||
|     -A, --auto-number        number downloaded files starting from 00000 | ||||
|     -o, --output TEMPLATE    output filename template. Use %(stitle)s to get the | ||||
|     -o, --output TEMPLATE    output filename template. Use %(title)s to get the | ||||
|                              title, %(uploader)s for the uploader name, | ||||
|                              %(autonumber)s to get an automatically incremented | ||||
|                              number, %(ext)s for the filename extension, | ||||
|                              %(upload_date)s for the upload date (YYYYMMDD), and | ||||
|                              %% for a literal percent. Use - to output to | ||||
|                              stdout. | ||||
|                              %(uploader_id)s for the uploader nickname if | ||||
|                              different, %(autonumber)s to get an automatically | ||||
|                              incremented number, %(ext)s for the filename | ||||
|                              extension, %(upload_date)s for the upload date | ||||
|                              (YYYYMMDD), %(extractor)s for the provider | ||||
|                              (youtube, metacafe, etc), %(id)s for the video id | ||||
|                              and %% for a literal percent. Use - to output to | ||||
|                              stdout. Can also be used to download to a different | ||||
|                              directory, for example with -o '/my/downloads/%(upl | ||||
|                              oader)s/%(title)s-%(id)s.%(ext)s' . | ||||
|     --restrict-filenames     Restrict filenames to only ASCII characters, and | ||||
|                              avoid "&" and spaces in filenames | ||||
|     -a, --batch-file FILE    file containing URLs to download ('-' for stdin) | ||||
|     -w, --no-overwrites      do not overwrite files | ||||
|     -c, --continue           resume partially downloaded files | ||||
| @@ -53,7 +74,7 @@ which means you can modify it, redistribute it or use it however you like. | ||||
|     --write-description      write video description to a .description file | ||||
|     --write-info-json        write video metadata to a .info.json file | ||||
|  | ||||
| ### Verbosity / Simulation Options: | ||||
| ## Verbosity / Simulation Options: | ||||
|     -q, --quiet              activates quiet mode | ||||
|     -s, --simulate           do not download the video and do not write anything | ||||
|                              to disk | ||||
| @@ -66,30 +87,67 @@ which means you can modify it, redistribute it or use it however you like. | ||||
|     --get-format             simulate, quiet but print output format | ||||
|     --no-progress            do not print progress bar | ||||
|     --console-title          display progress in console titlebar | ||||
|     -v, --verbose            print various debugging information | ||||
|  | ||||
| ### Video Format Options: | ||||
| ## Video Format Options: | ||||
|     -f, --format FORMAT      video format code | ||||
|     --all-formats            download all available video formats | ||||
|     --prefer-free-formats    prefer free video formats unless a specific one is | ||||
|                              requested | ||||
|     --max-quality FORMAT     highest quality format to download | ||||
|     -F, --list-formats       list all available formats (currently youtube only) | ||||
|     --write-srt              write video closed captions to a .srt file | ||||
|                              (currently youtube only) | ||||
|     --srt-lang LANG          language of the closed captions to download | ||||
|                              (optional) use IETF language tags like 'en' | ||||
|  | ||||
| ### Authentication Options: | ||||
| ## Authentication Options: | ||||
|     -u, --username USERNAME  account username | ||||
|     -p, --password PASSWORD  account password | ||||
|     -n, --netrc              use .netrc authentication data | ||||
|  | ||||
| ### Post-processing Options: | ||||
|     --extract-audio          convert video files to audio-only files (requires | ||||
|                              ffmpeg and ffprobe) | ||||
|     --audio-format FORMAT    "best", "aac", "vorbis", "mp3", or "m4a"; best by | ||||
|                              default | ||||
|     --audio-quality QUALITY  ffmpeg audio bitrate specification, 128k by default | ||||
| ## Post-processing Options: | ||||
|     -x, --extract-audio      convert video files to audio-only files (requires | ||||
|                              ffmpeg or avconv and ffprobe or avprobe) | ||||
|     --audio-format FORMAT    "best", "aac", "vorbis", "mp3", "m4a", "opus", or | ||||
|                              "wav"; best by default | ||||
|     --audio-quality QUALITY  ffmpeg/avconv audio quality specification, insert a | ||||
|                              value between 0 (better) and 9 (worse) for VBR or a | ||||
|                              specific bitrate like 128K (default 5) | ||||
|     --recode-video FORMAT    Encode the video to another format if necessary | ||||
|                              (currently supported: mp4|flv|ogg|webm) | ||||
|     -k, --keep-video         keeps the video file on disk after the post- | ||||
|                              processing; the video is erased by default | ||||
|     --no-post-overwrites     do not overwrite post-processed files; the post- | ||||
|                              processed files are overwritten by default | ||||
|  | ||||
| ## FAQ | ||||
| # CONFIGURATION | ||||
|  | ||||
| You can configure youtube-dl by placing default arguments (such as `--extract-audio --no-mtime` to always extract the audio and not copy the mtime) into `/etc/youtube-dl.conf` and/or `~/.config/youtube-dl.conf`. | ||||
|  | ||||
| # OUTPUT TEMPLATE | ||||
|  | ||||
| The `-o` option allows users to indicate a template for the output file names. The basic usage is not to set any template arguments when downloading a single file, like in `youtube-dl -o funny_video.flv "http://some/video"`. However, it may contain special sequences that will be replaced when downloading each video. The special sequences have the format `%(NAME)s`. To clarify, that is a percent symbol followed by a name in parenthesis, followed by a lowercase S. Allowed names are: | ||||
|  | ||||
|  - `id`: The sequence will be replaced by the video identifier. | ||||
|  - `url`: The sequence will be replaced by the video URL. | ||||
|  - `uploader`: The sequence will be replaced by the nickname of the person who uploaded the video. | ||||
|  - `upload_date`: The sequence will be replaced by the upload date in YYYYMMDD format. | ||||
|  - `title`: The sequence will be replaced by the video title. | ||||
|  - `ext`: The sequence will be replaced by the appropriate extension (like flv or mp4). | ||||
|  - `epoch`: The sequence will be replaced by the Unix epoch when creating the file. | ||||
|  - `autonumber`: The sequence will be replaced by a five-digit number that will be increased with each download, starting at zero. | ||||
|  | ||||
| The current default template is `%(id)s.%(ext)s`, but that will be switched to `%(title)s-%(id)s.%(ext)s` (which can be requested with `-t` at the moment). | ||||
|  | ||||
| In some cases, you don't want special characters such as 中, spaces, or &, such as when transferring the downloaded filename to a Windows system or the filename through an 8bit-unsafe channel. In these cases, add the `--restrict-filenames` flag to get a shorter title: | ||||
|  | ||||
|     $ youtube-dl --get-filename -o "%(title)s.%(ext)s" BaW_jenozKc | ||||
|     youtube-dl test video ''_ä↭𝕐.mp4    # All kinds of weird characters | ||||
|     $ youtube-dl --get-filename -o "%(title)s.%(ext)s" BaW_jenozKc --restrict-filenames | ||||
|     youtube-dl_test_video_.mp4          # A simple file name | ||||
|  | ||||
| # FAQ | ||||
|  | ||||
| ### Can you please put the -b option back? | ||||
|  | ||||
| @@ -105,25 +163,49 @@ Once the video is fully downloaded, use any video player, such as [vlc](http://w | ||||
|  | ||||
| ### The links provided by youtube-dl -g are not working anymore | ||||
|  | ||||
| The URLs youtube-dl outputs require the downloader to have the correct cookies. Use the `--cookies` option to write the required cookies into a file, and advise your downloader to read cookies from that file. | ||||
| The URLs youtube-dl outputs require the downloader to have the correct cookies. Use the `--cookies` option to write the required cookies into a file, and advise your downloader to read cookies from that file. Some sites also require a common user agent to be used, use `--dump-user-agent` to see the one in use by youtube-dl. | ||||
|  | ||||
| ### ERROR: no fmt_url_map or conn information found in video info | ||||
|  | ||||
| youtube has switched to a new video info format in July 2011 which is not supported by old versions of youtube-dl. You can update youtube-dl with `sudo youtube-dl --update`. | ||||
|  | ||||
| ## COPYRIGHT | ||||
| ### ERROR: unable to download video ### | ||||
|  | ||||
| youtube requires an additional signature since September 2012 which is not supported by old versions of youtube-dl. You can update youtube-dl with `sudo youtube-dl --update`. | ||||
|  | ||||
| ### SyntaxError: Non-ASCII character ### | ||||
|  | ||||
| The error | ||||
|  | ||||
|     File "youtube-dl", line 2 | ||||
|     SyntaxError: Non-ASCII character '\x93' ... | ||||
|  | ||||
| means you're using an outdated version of Python. Please update to Python 2.6 or 2.7. | ||||
|  | ||||
| ### What is this binary file? Where has the code gone? | ||||
|  | ||||
| Since June 2012 (#342) youtube-dl is packed as an executable zipfile, simply unzip it (might need renaming to `youtube-dl.zip` first on some systems) or clone the git repository, as laid out above. If you modify the code, you can run it by executing the `__main__.py` file. To recompile the executable, run `make youtube-dl`. | ||||
|  | ||||
| ### The exe throws a *Runtime error from Visual C++* | ||||
|  | ||||
| To run the exe you need to install first the [Microsoft Visual C++ 2008 Redistributable Package](http://www.microsoft.com/en-us/download/details.aspx?id=29). | ||||
|  | ||||
| # COPYRIGHT | ||||
|  | ||||
| youtube-dl is released into the public domain by the copyright holders. | ||||
|  | ||||
| This README file was originally written by Daniel Bolton (<https://github.com/dbbolton>) and is likewise released into the public domain. | ||||
|  | ||||
| ## BUGS | ||||
| # BUGS | ||||
|  | ||||
| Bugs and suggestions should be reported at: <https://github.com/rg3/youtube-dl/issues> | ||||
|  | ||||
| Please include: | ||||
|  | ||||
| * Your exact command line, like `youtube-dl -t "http://www.youtube.com/watch?v=uHlDtZ6Oc3s&feature=channel_video_title"`. A common mistake is not to escape the `&`. Putting URLs in quotes should solve this problem. | ||||
| * If possible re-run the command with `--verbose`, and include the full output, it is really helpful to us. | ||||
| * The output of `youtube-dl --version` | ||||
| * The output of `python --version` | ||||
| * The name and version of your Operating System ("Ubuntu 11.04 x64" or "Windows 7 x64" is usually enough). | ||||
|  | ||||
| For discussions, join us in the irc channel #youtube-dl on freenode. | ||||
|   | ||||
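
The OUTPUT TEMPLATE section added above boils down to Python's dict-based `%` string formatting. A small, self-contained illustration (the field values are made up, not taken from a real download):

```python
# Illustration of how the -o template's %(NAME)s sequences expand; youtube-dl
# fills these fields from the extracted video metadata. Values here are made up.
info = {
    'id': 'BaW_jenozKc',
    'title': 'youtube-dl test video',
    'uploader': 'someuploader',
    'uploader_id': 'someuploader',
    'upload_date': '20121002',
    'extractor': 'youtube',
    'ext': 'mp4',
    'autonumber': '00001',
}
template = '/my/downloads/%(uploader)s/%(title)s-%(id)s.%(ext)s'
print(template % info)
# /my/downloads/someuploader/youtube-dl test video-BaW_jenozKc.mp4
```
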
							
								
								
									
**bin/youtube-dl** (6 changes, new executable file)

							| @@ -0,0 +1,6 @@ | ||||
| #!/usr/bin/env python | ||||
|  | ||||
| import youtube_dl | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     youtube_dl.main() | ||||
							
								
								
									
										
**devscripts/SizeOfImage.patch** (binary, new file; contents not shown)

								
								
									
										
**devscripts/SizeOfImage_w.patch** (binary, new file; contents not shown)

								
								
									
**devscripts/bash-completion.in** (14 changes, new file)

							| @@ -0,0 +1,14 @@ | ||||
| __youtube-dl() | ||||
| { | ||||
|     local cur prev opts | ||||
|     COMPREPLY=() | ||||
|     cur="${COMP_WORDS[COMP_CWORD]}" | ||||
|     opts="{{flags}}" | ||||
|  | ||||
|     if [[ ${cur} == * ]] ; then | ||||
|         COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) | ||||
|         return 0 | ||||
|     fi | ||||
| } | ||||
|  | ||||
| complete -F __youtube-dl youtube-dl | ||||
							
								
								
									
**devscripts/bash-completion.py** (26 changes, new executable file)

							| @@ -0,0 +1,26 @@ | ||||
| #!/usr/bin/env python | ||||
| import os | ||||
| from os.path import dirname as dirn | ||||
| import sys | ||||
|  | ||||
| sys.path.append(dirn(dirn((os.path.abspath(__file__))))) | ||||
| import youtube_dl | ||||
|  | ||||
| BASH_COMPLETION_FILE = "youtube-dl.bash-completion" | ||||
| BASH_COMPLETION_TEMPLATE = "devscripts/bash-completion.in" | ||||
|  | ||||
| def build_completion(opt_parser): | ||||
|     opts_flag = [] | ||||
|     for group in opt_parser.option_groups: | ||||
|         for option in group.option_list: | ||||
|             #for every long flag | ||||
|             opts_flag.append(option.get_opt_string()) | ||||
|     with open(BASH_COMPLETION_TEMPLATE) as f: | ||||
|         template = f.read() | ||||
|     with open(BASH_COMPLETION_FILE, "w") as f: | ||||
|         #just using the special char | ||||
|         filled_template = template.replace("{{flags}}", " ".join(opts_flag)) | ||||
|         f.write(filled_template) | ||||
|  | ||||
| parser = youtube_dl.parseOpts()[0] | ||||
| build_completion(parser) | ||||
							
								
								
									
**devscripts/gh-pages/add-version.py** (33 changes, new executable file)

							| @@ -0,0 +1,33 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import json | ||||
| import sys | ||||
| import hashlib | ||||
| import urllib.request | ||||
|  | ||||
| if len(sys.argv) <= 1: | ||||
| 	print('Specify the version number as parameter') | ||||
| 	sys.exit() | ||||
| version = sys.argv[1] | ||||
|  | ||||
| with open('update/LATEST_VERSION', 'w') as f: | ||||
| 	f.write(version) | ||||
|  | ||||
| versions_info = json.load(open('update/versions.json')) | ||||
| if 'signature' in versions_info: | ||||
| 	del versions_info['signature'] | ||||
|  | ||||
| new_version = {} | ||||
|  | ||||
| filenames = {'bin': 'youtube-dl', 'exe': 'youtube-dl.exe', 'tar': 'youtube-dl-%s.tar.gz' % version} | ||||
| for key, filename in filenames.items(): | ||||
| 	print('Downloading and checksumming %s...' %filename) | ||||
| 	url = 'http://youtube-dl.org/downloads/%s/%s' % (version, filename) | ||||
| 	data = urllib.request.urlopen(url).read() | ||||
| 	sha256sum = hashlib.sha256(data).hexdigest() | ||||
| 	new_version[key] = (url, sha256sum) | ||||
|  | ||||
| versions_info['versions'][version] = new_version | ||||
| versions_info['latest'] = version | ||||
|  | ||||
| json.dump(versions_info, open('update/versions.json', 'w'), indent=4, sort_keys=True) | ||||
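
Reading from the script above (together with sign-versions.py below), `update/versions.json` should roughly take the following shape; the version number, URLs and hashes here are placeholders, not real release data:

```python
# Rough shape of update/versions.json as written by add-version.py (and later
# signed by sign-versions.py). All values below are placeholders.
import json

versions_info = {
    "latest": "2013.01.02",
    "versions": {
        "2013.01.02": {
            "bin": ["http://youtube-dl.org/downloads/2013.01.02/youtube-dl",
                    "<sha256 of the bin>"],
            "exe": ["http://youtube-dl.org/downloads/2013.01.02/youtube-dl.exe",
                    "<sha256 of the exe>"],
            "tar": ["http://youtube-dl.org/downloads/2013.01.02/youtube-dl-2013.01.02.tar.gz",
                    "<sha256 of the tarball>"],
        },
    },
    # "signature" is added afterwards by sign-versions.py
}
print(json.dumps(versions_info, indent=4, sort_keys=True))
```
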
							
								
								
									
**devscripts/gh-pages/generate-download.py** (32 changes, new executable file)

							| @@ -0,0 +1,32 @@ | ||||
| #!/usr/bin/env python3 | ||||
| import hashlib | ||||
| import shutil | ||||
| import subprocess | ||||
| import tempfile | ||||
| import urllib.request | ||||
| import json | ||||
|  | ||||
| versions_info = json.load(open('update/versions.json')) | ||||
| version = versions_info['latest'] | ||||
| URL = versions_info['versions'][version]['bin'][0] | ||||
|  | ||||
| data = urllib.request.urlopen(URL).read() | ||||
|  | ||||
| # Read template page | ||||
| with open('download.html.in', 'r', encoding='utf-8') as tmplf: | ||||
|     template = tmplf.read() | ||||
|  | ||||
| md5sum = hashlib.md5(data).hexdigest() | ||||
| sha1sum = hashlib.sha1(data).hexdigest() | ||||
| sha256sum = hashlib.sha256(data).hexdigest() | ||||
| template = template.replace('@PROGRAM_VERSION@', version) | ||||
| template = template.replace('@PROGRAM_URL@', URL) | ||||
| template = template.replace('@PROGRAM_MD5SUM@', md5sum) | ||||
| template = template.replace('@PROGRAM_SHA1SUM@', sha1sum) | ||||
| template = template.replace('@PROGRAM_SHA256SUM@', sha256sum) | ||||
| template = template.replace('@EXE_URL@', versions_info['versions'][version]['exe'][0]) | ||||
| template = template.replace('@EXE_SHA256SUM@', versions_info['versions'][version]['exe'][1]) | ||||
| template = template.replace('@TAR_URL@', versions_info['versions'][version]['tar'][0]) | ||||
| template = template.replace('@TAR_SHA256SUM@', versions_info['versions'][version]['tar'][1]) | ||||
| with open('download.html', 'w', encoding='utf-8') as dlf: | ||||
|     dlf.write(template) | ||||
							
								
								
									
**devscripts/gh-pages/sign-versions.py** (32 changes, new executable file)

							| @@ -0,0 +1,32 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import rsa | ||||
| import json | ||||
| from binascii import hexlify | ||||
|  | ||||
| try: | ||||
|     input = raw_input | ||||
| except NameError: | ||||
|     pass | ||||
|  | ||||
| versions_info = json.load(open('update/versions.json')) | ||||
| if 'signature' in versions_info: | ||||
| 	del versions_info['signature'] | ||||
|  | ||||
| print('Enter the PKCS1 private key, followed by a blank line:') | ||||
| privkey = b'' | ||||
| while True: | ||||
| 	try: | ||||
| 		line = input() | ||||
| 	except EOFError: | ||||
| 		break | ||||
| 	if line == '': | ||||
| 		break | ||||
| 	privkey += line.encode('ascii') + b'\n' | ||||
| privkey = rsa.PrivateKey.load_pkcs1(privkey) | ||||
|  | ||||
| signature = hexlify(rsa.pkcs1.sign(json.dumps(versions_info, sort_keys=True).encode('utf-8'), privkey, 'SHA-256')).decode() | ||||
| print('signature: ' + signature) | ||||
|  | ||||
| versions_info['signature'] = signature | ||||
| json.dump(versions_info, open('update/versions.json', 'w'), indent=4, sort_keys=True) | ||||
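
The consumer side of this signature appears further down in devscripts/transition_helper_exe/youtube-dl.py, which verifies it by hand. With the same `rsa` module used here for signing, a verification sketch could look like this; the `(n, e)` pair mirrors the UPDATES_RSA_KEY constant from that file, and this snippet is an illustration, not part of the release tooling:

```python
# Sketch: verify the signature that sign-versions.py attaches to versions.json,
# using the python-rsa package.
import json
from binascii import unhexlify

import rsa

UPDATES_RSA_KEY = (0x9d60ee4d8f805312fdb15a62f87b95bd66177b91df176765d13514a0f1754bcd2057295c5b6f1d35daa6742c3ffc9a82d3e118861c207995a8031e151d863c9927e304576bc80692bc8e094896fcf11b66f3e29e04e3a71e9a11558558acea1840aec37fc396fb6b65dc81a1c4144e03bd1c011de62e3f1357b327d08426fe93, 65537)

versions_info = json.load(open('update/versions.json'))
signature = unhexlify(versions_info.pop('signature'))

pubkey = rsa.PublicKey(*UPDATES_RSA_KEY)
message = json.dumps(versions_info, sort_keys=True).encode('utf-8')
rsa.verify(message, signature, pubkey)  # raises rsa.VerificationError if invalid
print('signature OK')
```
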
							
								
								
									
**devscripts/gh-pages/update-copyright.py** (21 changes, new executable file)

							| @@ -0,0 +1,21 @@ | ||||
| #!/usr/bin/env python | ||||
| # coding: utf-8 | ||||
|  | ||||
| from __future__ import with_statement | ||||
|  | ||||
| import datetime | ||||
| import glob | ||||
| import io # For Python 2 compatibility | ||||
| import os | ||||
| import re | ||||
|  | ||||
| year = str(datetime.datetime.now().year) | ||||
| for fn in glob.glob('*.html*'): | ||||
|     with io.open(fn, encoding='utf-8') as f: | ||||
|         content = f.read() | ||||
|     newc = re.sub(u'(?P<copyright>Copyright © 2006-)(?P<year>[0-9]{4})', u'Copyright © 2006-' + year, content) | ||||
|     if content != newc: | ||||
|         tmpFn = fn + '.part' | ||||
|         with io.open(tmpFn, 'wt', encoding='utf-8') as outf: | ||||
|             outf.write(newc) | ||||
|         os.rename(tmpFn, fn) | ||||
							
								
								
									
**devscripts/make_readme.py** (20 changes, new executable file)

							| @@ -0,0 +1,20 @@ | ||||
| import sys | ||||
| import re | ||||
|  | ||||
| README_FILE = 'README.md' | ||||
| helptext = sys.stdin.read() | ||||
|  | ||||
| with open(README_FILE) as f: | ||||
|     oldreadme = f.read() | ||||
|  | ||||
| header = oldreadme[:oldreadme.index('# OPTIONS')] | ||||
| footer = oldreadme[oldreadme.index('# CONFIGURATION'):] | ||||
|  | ||||
| options = helptext[helptext.index('  General Options:')+19:] | ||||
| options = re.sub(r'^  (\w.+)$', r'## \1', options, flags=re.M) | ||||
| options = '# OPTIONS\n' + options + '\n' | ||||
|  | ||||
| with open(README_FILE, 'w') as f: | ||||
|     f.write(header) | ||||
|     f.write(options) | ||||
|     f.write(footer) | ||||
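
For a sense of what the regular expression in make_readme.py does, here is a tiny stand-alone illustration; the help-text snippet is made up to mimic optparse's layout:

```python
# Illustration: the re.sub in make_readme.py promotes optparse group headers
# (two-space indented lines) to "## " markdown headings. Sample input is made up.
import re

helptext_options = (
    "  General Options:\n"
    "    -h, --help               print this help text and exit\n"
    "  Video Selection:\n"
    "    --playlist-start NUMBER  playlist video to start at (default is 1)\n"
)
print(re.sub(r'^  (\w.+)$', r'## \1', helptext_options, flags=re.M))
```
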
							
								
								
									
**devscripts/posix-locale.sh** (6 changes, new executable file)

							| @@ -0,0 +1,6 @@ | ||||
|  | ||||
| # source this file in your shell to get a POSIX locale (which will break many programs, but that's kind of the point) | ||||
|  | ||||
| export LC_ALL=POSIX | ||||
| export LANG=POSIX | ||||
| export LANGUAGE=POSIX | ||||
							
								
								
									
**devscripts/release.sh** (89 changes, new executable file)

							| @@ -0,0 +1,89 @@ | ||||
| #!/bin/bash | ||||
|  | ||||
| # IMPORTANT: the following assumptions are made | ||||
| # * the GH repo is on the origin remote | ||||
| # * the gh-pages branch is named so locally | ||||
| # * the git config user.signingkey is properly set | ||||
|  | ||||
| # You will need | ||||
| # pip install coverage nose rsa | ||||
|  | ||||
| # TODO | ||||
| # release notes | ||||
| # make hash on local files | ||||
|  | ||||
| set -e | ||||
|  | ||||
| if [ -z "$1" ]; then echo "ERROR: specify version number like this: $0 1994.09.06"; exit 1; fi | ||||
| version="$1" | ||||
| if [ ! -z "`git tag | grep "$version"`" ]; then echo 'ERROR: version already present'; exit 1; fi | ||||
| if [ ! -z "`git status --porcelain | grep -v CHANGELOG`" ]; then echo 'ERROR: the working directory is not clean; commit or stash changes'; exit 1; fi | ||||
| if [ ! -f "updates_key.pem" ]; then echo 'ERROR: updates_key.pem missing'; exit 1; fi | ||||
|  | ||||
| echo "\n### First of all, testing..." | ||||
| make clean | ||||
| nosetests --with-coverage --cover-package=youtube_dl --cover-html test || exit 1 | ||||
|  | ||||
| echo "\n### Changing version in version.py..." | ||||
| sed -i "s/__version__ = '.*'/__version__ = '$version'/" youtube_dl/version.py | ||||
|  | ||||
| echo "\n### Committing CHANGELOG README.md and youtube_dl/version.py..." | ||||
| make README.md | ||||
| git add CHANGELOG README.md youtube_dl/version.py | ||||
| git commit -m "release $version" | ||||
|  | ||||
| echo "\n### Now tagging, signing and pushing..." | ||||
| git tag -s -m "Release $version" "$version" | ||||
| git show "$version" | ||||
| read -p "Is it good, can I push? (y/n) " -n 1 | ||||
| if [[ ! $REPLY =~ ^[Yy]$ ]]; then exit 1; fi | ||||
| echo | ||||
| MASTER=$(git rev-parse --abbrev-ref HEAD) | ||||
| git push origin $MASTER:master | ||||
| git push origin "$version" | ||||
|  | ||||
| echo "\n### OK, now it is time to build the binaries..." | ||||
| REV=$(git rev-parse HEAD) | ||||
| make youtube-dl youtube-dl.tar.gz | ||||
| wget "http://jeromelaheurte.net:8142/download/rg3/youtube-dl/youtube-dl.exe?rev=$REV" -O youtube-dl.exe || \ | ||||
| 	wget "http://jeromelaheurte.net:8142/build/rg3/youtube-dl/youtube-dl.exe?rev=$REV" -O youtube-dl.exe | ||||
| mkdir -p "build/$version" | ||||
| mv youtube-dl youtube-dl.exe "build/$version" | ||||
| mv youtube-dl.tar.gz "build/$version/youtube-dl-$version.tar.gz" | ||||
| RELEASE_FILES="youtube-dl youtube-dl.exe youtube-dl-$version.tar.gz" | ||||
| (cd build/$version/ && md5sum $RELEASE_FILES > MD5SUMS) | ||||
| (cd build/$version/ && sha1sum $RELEASE_FILES > SHA1SUMS) | ||||
| (cd build/$version/ && sha256sum $RELEASE_FILES > SHA2-256SUMS) | ||||
| (cd build/$version/ && sha512sum $RELEASE_FILES > SHA2-512SUMS) | ||||
| git checkout HEAD -- youtube-dl youtube-dl.exe | ||||
|  | ||||
| echo "\n### Signing and uploading the new binaries to youtube-dl.org..." | ||||
| for f in $RELEASE_FILES; do gpg --detach-sig "build/$version/$f"; done | ||||
| scp -r "build/$version" ytdl@youtube-dl.org:html/downloads/ | ||||
|  | ||||
| echo "\n### Now switching to gh-pages..." | ||||
| git clone --branch gh-pages --single-branch . build/gh-pages | ||||
| ROOT=$(pwd) | ||||
| ( | ||||
|     set -e | ||||
|     ORIGIN_URL=$(git config --get remote.origin.url) | ||||
|     cd build/gh-pages | ||||
|     "$ROOT/devscripts/gh-pages/add-version.py" $version | ||||
|     "$ROOT/devscripts/gh-pages/sign-versions.py" < "$ROOT/updates_key.pem" | ||||
|     "$ROOT/devscripts/gh-pages/generate-download.py" | ||||
|     "$ROOT/devscripts/gh-pages/update-copyright.py" | ||||
|     git add *.html *.html.in update | ||||
|     git commit -m "release $version" | ||||
|     git show HEAD | ||||
|     read -p "Is it good, can I push? (y/n) " -n 1 | ||||
|     if [[ ! $REPLY =~ ^[Yy]$ ]]; then exit 1; fi | ||||
|     echo | ||||
|     git push "$ROOT" gh-pages | ||||
|     git push "$ORIGIN_URL" gh-pages | ||||
| ) | ||||
| rm -rf build | ||||
|  | ||||
| echo "Uploading to PyPi ..." | ||||
| pip sdist upload | ||||
|  | ||||
| echo "\n### DONE!" | ||||
							
								
								
									
**devscripts/transition_helper.py** (40 changes, new file)

							| @@ -0,0 +1,40 @@ | ||||
| #!/usr/bin/env python | ||||
|  | ||||
| import sys, os | ||||
|  | ||||
| try: | ||||
|     import urllib.request as compat_urllib_request | ||||
| except ImportError: # Python 2 | ||||
|     import urllib2 as compat_urllib_request | ||||
|  | ||||
| sys.stderr.write(u'Hi! We changed distribution method and now youtube-dl needs to update itself one more time.\n') | ||||
| sys.stderr.write(u'This will only happen once. Simply press enter to go on. Sorry for the trouble!\n') | ||||
| sys.stderr.write(u'The new location of the binaries is https://github.com/rg3/youtube-dl/downloads, not the git repository.\n\n') | ||||
|  | ||||
| try: | ||||
| 	raw_input() | ||||
| except NameError: # Python 3 | ||||
| 	input() | ||||
|  | ||||
| filename = sys.argv[0] | ||||
|  | ||||
| API_URL = "https://api.github.com/repos/rg3/youtube-dl/downloads" | ||||
| BIN_URL = "https://github.com/downloads/rg3/youtube-dl/youtube-dl" | ||||
|  | ||||
| if not os.access(filename, os.W_OK): | ||||
|     sys.exit('ERROR: no write permissions on %s' % filename) | ||||
|  | ||||
| try: | ||||
|     urlh = compat_urllib_request.urlopen(BIN_URL) | ||||
|     newcontent = urlh.read() | ||||
|     urlh.close() | ||||
| except (IOError, OSError) as err: | ||||
|     sys.exit('ERROR: unable to download latest version') | ||||
|  | ||||
| try: | ||||
|     with open(filename, 'wb') as outf: | ||||
|         outf.write(newcontent) | ||||
| except (IOError, OSError) as err: | ||||
|     sys.exit('ERROR: unable to overwrite current version') | ||||
|  | ||||
| sys.stderr.write(u'Done! Now you can run youtube-dl.\n') | ||||
							
								
								
									
**devscripts/transition_helper_exe/setup.py** (12 changes, new file)

							| @@ -0,0 +1,12 @@ | ||||
| from distutils.core import setup | ||||
| import py2exe | ||||
|  | ||||
| py2exe_options = { | ||||
|     "bundle_files": 1, | ||||
|     "compressed": 1, | ||||
|     "optimize": 2, | ||||
|     "dist_dir": '.', | ||||
|     "dll_excludes": ['w9xpopen.exe'] | ||||
| } | ||||
|  | ||||
| setup(console=['youtube-dl.py'], options={ "py2exe": py2exe_options }, zipfile=None) | ||||
							
								
								
									
**devscripts/transition_helper_exe/youtube-dl.py** (102 changes, new file)

							| @@ -0,0 +1,102 @@ | ||||
| #!/usr/bin/env python | ||||
|  | ||||
| import sys, os | ||||
| import urllib2 | ||||
| import json, hashlib | ||||
|  | ||||
| def rsa_verify(message, signature, key): | ||||
|     from struct import pack | ||||
|     from hashlib import sha256 | ||||
|     from sys import version_info | ||||
|     def b(x): | ||||
|         if version_info[0] == 2: return x | ||||
|         else: return x.encode('latin1') | ||||
|     assert(type(message) == type(b(''))) | ||||
|     block_size = 0 | ||||
|     n = key[0] | ||||
|     while n: | ||||
|         block_size += 1 | ||||
|         n >>= 8 | ||||
|     signature = pow(int(signature, 16), key[1], key[0]) | ||||
|     raw_bytes = [] | ||||
|     while signature: | ||||
|         raw_bytes.insert(0, pack("B", signature & 0xFF)) | ||||
|         signature >>= 8 | ||||
|     signature = (block_size - len(raw_bytes)) * b('\x00') + b('').join(raw_bytes) | ||||
|     if signature[0:2] != b('\x00\x01'): return False | ||||
|     signature = signature[2:] | ||||
|     if not b('\x00') in signature: return False | ||||
|     signature = signature[signature.index(b('\x00'))+1:] | ||||
|     if not signature.startswith(b('\x30\x31\x30\x0D\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x01\x05\x00\x04\x20')): return False | ||||
|     signature = signature[19:] | ||||
|     if signature != sha256(message).digest(): return False | ||||
|     return True | ||||
|  | ||||
| sys.stderr.write(u'Hi! We changed distribution method and now youtube-dl needs to update itself one more time.\n') | ||||
| sys.stderr.write(u'This will only happen once. Simply press enter to go on. Sorry for the trouble!\n') | ||||
| sys.stderr.write(u'From now on, get the binaries from http://rg3.github.com/youtube-dl/download.html, not from the git repository.\n\n') | ||||
|  | ||||
| raw_input() | ||||
|  | ||||
| filename = sys.argv[0] | ||||
|  | ||||
| UPDATE_URL = "http://rg3.github.com/youtube-dl/update/" | ||||
| VERSION_URL = UPDATE_URL + 'LATEST_VERSION' | ||||
| JSON_URL = UPDATE_URL + 'versions.json' | ||||
| UPDATES_RSA_KEY = (0x9d60ee4d8f805312fdb15a62f87b95bd66177b91df176765d13514a0f1754bcd2057295c5b6f1d35daa6742c3ffc9a82d3e118861c207995a8031e151d863c9927e304576bc80692bc8e094896fcf11b66f3e29e04e3a71e9a11558558acea1840aec37fc396fb6b65dc81a1c4144e03bd1c011de62e3f1357b327d08426fe93, 65537) | ||||
|  | ||||
| if not os.access(filename, os.W_OK): | ||||
|     sys.exit('ERROR: no write permissions on %s' % filename) | ||||
|  | ||||
| exe = os.path.abspath(filename) | ||||
| directory = os.path.dirname(exe) | ||||
| if not os.access(directory, os.W_OK): | ||||
|     sys.exit('ERROR: no write permissions on %s' % directory) | ||||
|  | ||||
| try: | ||||
|     versions_info = urllib2.urlopen(JSON_URL).read().decode('utf-8') | ||||
|     versions_info = json.loads(versions_info) | ||||
| except: | ||||
|     sys.exit(u'ERROR: can\'t obtain versions info. Please try again later.') | ||||
| if not 'signature' in versions_info: | ||||
|     sys.exit(u'ERROR: the versions file is not signed or corrupted. Aborting.') | ||||
| signature = versions_info['signature'] | ||||
| del versions_info['signature'] | ||||
| if not rsa_verify(json.dumps(versions_info, sort_keys=True), signature, UPDATES_RSA_KEY): | ||||
|     sys.exit(u'ERROR: the versions file signature is invalid. Aborting.') | ||||
|  | ||||
| version = versions_info['versions'][versions_info['latest']] | ||||
|  | ||||
| try: | ||||
|     urlh = urllib2.urlopen(version['exe'][0]) | ||||
|     newcontent = urlh.read() | ||||
|     urlh.close() | ||||
| except (IOError, OSError) as err: | ||||
|     sys.exit('ERROR: unable to download latest version') | ||||
|  | ||||
| newcontent_hash = hashlib.sha256(newcontent).hexdigest() | ||||
| if newcontent_hash != version['exe'][1]: | ||||
|     sys.exit(u'ERROR: the downloaded file hash does not match. Aborting.') | ||||
|  | ||||
| try: | ||||
|     with open(exe + '.new', 'wb') as outf: | ||||
|         outf.write(newcontent) | ||||
| except (IOError, OSError) as err: | ||||
|     sys.exit(u'ERROR: unable to write the new version') | ||||
|  | ||||
| try: | ||||
|     bat = os.path.join(directory, 'youtube-dl-updater.bat') | ||||
|     b = open(bat, 'w') | ||||
|     b.write(""" | ||||
| echo Updating youtube-dl... | ||||
| ping 127.0.0.1 -n 5 -w 1000 > NUL | ||||
| move /Y "%s.new" "%s" | ||||
| del "%s" | ||||
|     \n""" %(exe, exe, bat)) | ||||
|     b.close() | ||||
|  | ||||
|     os.startfile(bat) | ||||
| except (IOError, OSError) as err: | ||||
|     sys.exit('ERROR: unable to overwrite current version') | ||||
|  | ||||
| sys.stderr.write(u'Done! Now you can run youtube-dl.\n') | ||||
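For context: the signature field checked by rsa_verify() above is a hex-encoded PKCS#1 v1.5 RSA signature (SHA-256) over json.dumps(versions_info, sort_keys=True) with the signature key removed. Below is a minimal sketch of the signing side, assuming the third-party rsa package and a hypothetical key file name; it is illustrative only and not part of this changeset.

    import json
    from binascii import hexlify

    import rsa  # third-party package, assumed available (pip install rsa)

    def sign_versions(versions_info, priv_key):
        # Sign the same canonical form that rsa_verify() reconstructs on the client:
        # json.dumps(..., sort_keys=True) with the 'signature' key absent.
        data = json.dumps(versions_info, sort_keys=True).encode('utf-8')
        # PKCS#1 v1.5 with SHA-256, hex-encoded so the updater can do int(sig, 16).
        return hexlify(rsa.sign(data, priv_key, 'SHA-256')).decode('ascii')

    # Hypothetical usage with a PEM-encoded PKCS#1 private key:
    # with open('updates_key.pem', 'rb') as keyf:
    #     priv_key = rsa.PrivateKey.load_pkcs1(keyf.read())
    # versions_info['signature'] = sign_versions(versions_info, priv_key)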
							
								
								
									
56  devscripts/wine-py2exe.sh  Executable file
							| @@ -0,0 +1,56 @@ | ||||
| #!/bin/bash | ||||
|  | ||||
| # Run with as parameter a setup.py that works in the current directory | ||||
| # e.g. no os.chdir() | ||||
| # It will run twice, the first time will crash | ||||
|  | ||||
| set -e | ||||
|  | ||||
| SCRIPT_DIR="$( cd "$( dirname "$0" )" && pwd )" | ||||
|  | ||||
| if [ ! -d wine-py2exe ]; then | ||||
|  | ||||
|     sudo apt-get install wine1.3 axel bsdiff | ||||
|  | ||||
|     mkdir wine-py2exe | ||||
|     cd wine-py2exe | ||||
|     export WINEPREFIX=`pwd` | ||||
|  | ||||
|     axel -a "http://www.python.org/ftp/python/2.7/python-2.7.msi" | ||||
|     axel -a "http://downloads.sourceforge.net/project/py2exe/py2exe/0.6.9/py2exe-0.6.9.win32-py2.7.exe" | ||||
|     #axel -a "http://winetricks.org/winetricks" | ||||
|  | ||||
|     # http://appdb.winehq.org/objectManager.php?sClass=version&iId=21957 | ||||
|     echo "Follow python setup on screen" | ||||
|     wine msiexec /i python-2.7.msi | ||||
|      | ||||
|     echo "Follow py2exe setup on screen" | ||||
|     wine py2exe-0.6.9.win32-py2.7.exe | ||||
|      | ||||
|     #echo "Follow Microsoft Visual C++ 2008 Redistributable Package setup on screen" | ||||
|     #bash winetricks vcrun2008 | ||||
|  | ||||
|     rm py2exe-0.6.9.win32-py2.7.exe | ||||
|     rm python-2.7.msi | ||||
|     #rm winetricks | ||||
|      | ||||
|     # http://bugs.winehq.org/show_bug.cgi?id=3591 | ||||
|      | ||||
|     mv drive_c/Python27/Lib/site-packages/py2exe/run.exe drive_c/Python27/Lib/site-packages/py2exe/run.exe.backup | ||||
|     bspatch drive_c/Python27/Lib/site-packages/py2exe/run.exe.backup drive_c/Python27/Lib/site-packages/py2exe/run.exe "$SCRIPT_DIR/SizeOfImage.patch" | ||||
|     mv drive_c/Python27/Lib/site-packages/py2exe/run_w.exe drive_c/Python27/Lib/site-packages/py2exe/run_w.exe.backup | ||||
|     bspatch drive_c/Python27/Lib/site-packages/py2exe/run_w.exe.backup drive_c/Python27/Lib/site-packages/py2exe/run_w.exe "$SCRIPT_DIR/SizeOfImage_w.patch" | ||||
|  | ||||
|     cd - | ||||
|      | ||||
| else | ||||
|  | ||||
|     export WINEPREFIX="$( cd wine-py2exe && pwd )" | ||||
|  | ||||
| fi | ||||
|  | ||||
| wine "C:\\Python27\\python.exe" "$1" py2exe > "py2exe.log" 2>&1 || true | ||||
| echo '# Copying python27.dll' >> "py2exe.log" | ||||
| cp "$WINEPREFIX/drive_c/windows/system32/python27.dll" build/bdist.win32/winexe/bundle-2.7/ | ||||
| wine "C:\\Python27\\python.exe" "$1" py2exe >> "py2exe.log" 2>&1 | ||||
|  | ||||
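A usage note on the script above: it takes the path to a py2exe setup.py as its single argument and should be started from the directory that setup.py expects to run in, since the setup must not chdir. From the repository root that would presumably be something like devscripts/wine-py2exe.sh setup.py to produce youtube-dl.exe. The double invocation matches the header comment that the first run will crash; between the two runs the script copies python27.dll into build/bdist.win32/winexe/bundle-2.7/ so that the second py2exe pass can complete with the DLL in place.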
							
								
								
									
78  setup.py  Normal file
							| @@ -0,0 +1,78 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| from __future__ import print_function | ||||
| import pkg_resources | ||||
| import sys | ||||
|  | ||||
| try: | ||||
|     from setuptools import setup | ||||
| except ImportError: | ||||
|     from distutils.core import setup | ||||
|  | ||||
| try: | ||||
|     import py2exe | ||||
|     """This will create an exe that needs Microsoft Visual C++ 2008 Redistributable Package""" | ||||
| except ImportError: | ||||
|     if len(sys.argv) >= 2 and sys.argv[1] == 'py2exe': | ||||
|         print("Cannot import py2exe", file=sys.stderr) | ||||
|         exit(1) | ||||
|  | ||||
| py2exe_options = { | ||||
|     "bundle_files": 1, | ||||
|     "compressed": 1, | ||||
|     "optimize": 2, | ||||
|     "dist_dir": '.', | ||||
|     "dll_excludes": ['w9xpopen.exe'] | ||||
| } | ||||
| py2exe_console = [{ | ||||
|     "script": "./youtube_dl/__main__.py", | ||||
|     "dest_base": "youtube-dl", | ||||
| }] | ||||
| py2exe_params = { | ||||
|     'console': py2exe_console, | ||||
|     'options': { "py2exe": py2exe_options }, | ||||
|     'zipfile': None | ||||
| } | ||||
|  | ||||
| if len(sys.argv) >= 2 and sys.argv[1] == 'py2exe': | ||||
|     params = py2exe_params | ||||
| else: | ||||
|     params = { | ||||
|         'scripts': ['bin/youtube-dl'], | ||||
|         'data_files': [('etc/bash_completion.d', ['youtube-dl.bash-completion']), # Installing system-wide would require sudo... | ||||
|                        ('share/doc/youtube_dl', ['README.txt']), | ||||
|                        ('share/man/man1/', ['youtube-dl.1'])] | ||||
|     } | ||||
|  | ||||
| # Get the version from youtube_dl/version.py without importing the package | ||||
| exec(compile(open('youtube_dl/version.py').read(), 'youtube_dl/version.py', 'exec')) | ||||
|  | ||||
| setup( | ||||
|     name = 'youtube_dl', | ||||
|     version = __version__, | ||||
|     description = 'YouTube video downloader', | ||||
|     long_description = 'Small command-line program to download videos from YouTube.com and other video sites.', | ||||
|     url = 'https://github.com/rg3/youtube-dl', | ||||
|     author = 'Ricardo Garcia', | ||||
|     maintainer = 'Philipp Hagemeister', | ||||
|     maintainer_email = 'phihag@phihag.de', | ||||
|     packages = ['youtube_dl'], | ||||
|  | ||||
|     # Provokes warning on most systems (why?!) | ||||
|     #test_suite = 'nose.collector', | ||||
|     #test_requires = ['nosetest'], | ||||
|  | ||||
|     classifiers = [ | ||||
|         "Topic :: Multimedia :: Video", | ||||
|         "Development Status :: 5 - Production/Stable", | ||||
|         "Environment :: Console", | ||||
|         "License :: Public Domain", | ||||
|         "Programming Language :: Python :: 2.6", | ||||
|         "Programming Language :: Python :: 2.7", | ||||
|         "Programming Language :: Python :: 3", | ||||
|         "Programming Language :: Python :: 3.3" | ||||
|     ], | ||||
|  | ||||
|     **params | ||||
| ) | ||||
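In short, this setup.py has two modes: python setup.py py2exe builds a single-file youtube-dl.exe from youtube_dl/__main__.py (bundle_files 1, zipfile None, output in the current directory), while any other invocation, e.g. python setup.py install, performs a regular setuptools/distutils install that ships the bin/youtube-dl script together with the bash completion file, README and man page as data files. The exec(compile(...)) line pulls __version__ out of youtube_dl/version.py, presumably so that setup.py never has to import (and thus execute) the youtube_dl package itself.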
							
								
								
									
40  test/parameters.json  Normal file
							| @@ -0,0 +1,40 @@ | ||||
| { | ||||
|     "consoletitle": false,  | ||||
|     "continuedl": true,  | ||||
|     "forcedescription": false,  | ||||
|     "forcefilename": false,  | ||||
|     "forceformat": false,  | ||||
|     "forcethumbnail": false,  | ||||
|     "forcetitle": false,  | ||||
|     "forceurl": false,  | ||||
|     "format": null,  | ||||
|     "format_limit": null,  | ||||
|     "ignoreerrors": false,  | ||||
|     "listformats": null,  | ||||
|     "logtostderr": false,  | ||||
|     "matchtitle": null,  | ||||
|     "max_downloads": null,  | ||||
|     "nooverwrites": false,  | ||||
|     "nopart": false,  | ||||
|     "noprogress": false,  | ||||
|     "outtmpl": "%(id)s.%(ext)s",  | ||||
|     "password": null,  | ||||
|     "playlistend": -1,  | ||||
|     "playliststart": 1,  | ||||
|     "prefer_free_formats": false,  | ||||
|     "quiet": false,  | ||||
|     "ratelimit": null,  | ||||
|     "rejecttitle": null,  | ||||
|     "retries": 10,  | ||||
|     "simulate": false,  | ||||
|     "skip_download": false,  | ||||
|     "subtitleslang": null,  | ||||
|     "test": true,  | ||||
|     "updatetime": true,  | ||||
|     "usenetrc": false,  | ||||
|     "username": null,  | ||||
|     "verbose": true,  | ||||
|     "writedescription": false,  | ||||
|     "writeinfojson": true,  | ||||
|     "writesubtitles": false | ||||
| } | ||||
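These keys mirror the FileDownloader options documented in youtube_dl/FileDownloader.py further down; the test scripts below (test_download.py, test_write_info_json.py, test_youtube_lists.py, test_youtube_subtitles.py) load this file as their baseline configuration. Notably, outtmpl is %(id)s.%(ext)s so downloaded files are named after the video id, and test is true so only the first bytes of each video are fetched.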
							
								
								
									
27  test/test_all_urls.py  Normal file
							| @@ -0,0 +1,27 @@ | ||||
| #!/usr/bin/env python | ||||
|  | ||||
| import sys | ||||
| import unittest | ||||
|  | ||||
| # Allow direct execution | ||||
| import os | ||||
| sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) | ||||
|  | ||||
| from youtube_dl.InfoExtractors import YoutubeIE, YoutubePlaylistIE | ||||
|  | ||||
| class TestAllURLsMatching(unittest.TestCase): | ||||
|     def test_youtube_playlist_matching(self): | ||||
|         self.assertTrue(YoutubePlaylistIE().suitable(u'ECUl4u3cNGP61MdtwGTqZA0MreSaDybji8')) | ||||
|         self.assertTrue(YoutubePlaylistIE().suitable(u'PL63F0C78739B09958')) | ||||
|         self.assertFalse(YoutubePlaylistIE().suitable(u'PLtS2H6bU1M')) | ||||
|  | ||||
|     def test_youtube_matching(self): | ||||
|         self.assertTrue(YoutubeIE().suitable(u'PLtS2H6bU1M')) | ||||
|  | ||||
|     def test_youtube_extract(self): | ||||
|         self.assertEqual(YoutubeIE()._extract_id('http://www.youtube.com/watch?&v=BaW_jenozKc'), 'BaW_jenozKc') | ||||
|         self.assertEqual(YoutubeIE()._extract_id('https://www.youtube.com/watch?&v=BaW_jenozKc'), 'BaW_jenozKc') | ||||
|         self.assertEqual(YoutubeIE()._extract_id('https://www.youtube.com/watch?feature=player_embedded&v=BaW_jenozKc'), 'BaW_jenozKc') | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
| @@ -1,29 +0,0 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| # Various small unit tests | ||||
|  | ||||
| import os,sys | ||||
| sys.path.append(os.path.dirname(os.path.dirname(__file__))) | ||||
|  | ||||
| import youtube_dl | ||||
|  | ||||
| def test_simplify_title(): | ||||
| 	assert youtube_dl._simplify_title(u'abc') == u'abc' | ||||
| 	assert youtube_dl._simplify_title(u'abc_d-e') == u'abc_d-e' | ||||
|  | ||||
| 	assert youtube_dl._simplify_title(u'123') == u'123' | ||||
|  | ||||
| 	assert u'/' not in youtube_dl._simplify_title(u'abc/de') | ||||
| 	assert u'abc' in youtube_dl._simplify_title(u'abc/de') | ||||
| 	assert u'de' in youtube_dl._simplify_title(u'abc/de') | ||||
| 	assert u'/' not in youtube_dl._simplify_title(u'abc/de///') | ||||
|  | ||||
| 	assert u'\\' not in youtube_dl._simplify_title(u'abc\\de') | ||||
| 	assert u'abc' in youtube_dl._simplify_title(u'abc\\de') | ||||
| 	assert u'de' in youtube_dl._simplify_title(u'abc\\de') | ||||
|  | ||||
| 	assert youtube_dl._simplify_title(u'ä') == u'ä' | ||||
| 	assert youtube_dl._simplify_title(u'кириллица') == u'кириллица' | ||||
|  | ||||
| 	# Strip underlines | ||||
| 	assert youtube_dl._simplify_title(u'\'a_') == u'a' | ||||
							
								
								
									
128  test/test_download.py  Normal file
							| @@ -0,0 +1,128 @@ | ||||
| #!/usr/bin/env python | ||||
|  | ||||
| import errno | ||||
| import hashlib | ||||
| import io | ||||
| import os | ||||
| import json | ||||
| import unittest | ||||
| import sys | ||||
| import hashlib | ||||
| import socket | ||||
|  | ||||
| # Allow direct execution | ||||
| sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) | ||||
|  | ||||
| import youtube_dl.FileDownloader | ||||
| import youtube_dl.InfoExtractors | ||||
| from youtube_dl.utils import * | ||||
|  | ||||
| DEF_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'tests.json') | ||||
| PARAMETERS_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "parameters.json") | ||||
|  | ||||
| # General configuration (from __init__, not very elegant...) | ||||
| jar = compat_cookiejar.CookieJar() | ||||
| cookie_processor = compat_urllib_request.HTTPCookieProcessor(jar) | ||||
| proxy_handler = compat_urllib_request.ProxyHandler() | ||||
| opener = compat_urllib_request.build_opener(proxy_handler, cookie_processor, YoutubeDLHandler()) | ||||
| compat_urllib_request.install_opener(opener) | ||||
| socket.setdefaulttimeout(10) | ||||
|  | ||||
| def _try_rm(filename): | ||||
|     """ Remove a file if it exists """ | ||||
|     try: | ||||
|         os.remove(filename) | ||||
|     except OSError as ose: | ||||
|         if ose.errno != errno.ENOENT: | ||||
|             raise | ||||
|  | ||||
| class FileDownloader(youtube_dl.FileDownloader): | ||||
|     def __init__(self, *args, **kwargs): | ||||
|         self.to_stderr = self.to_screen | ||||
|         self.processed_info_dicts = [] | ||||
|         return youtube_dl.FileDownloader.__init__(self, *args, **kwargs) | ||||
|     def process_info(self, info_dict): | ||||
|         self.processed_info_dicts.append(info_dict) | ||||
|         return youtube_dl.FileDownloader.process_info(self, info_dict) | ||||
|  | ||||
| def _file_md5(fn): | ||||
|     with open(fn, 'rb') as f: | ||||
|         return hashlib.md5(f.read()).hexdigest() | ||||
|  | ||||
| with io.open(DEF_FILE, encoding='utf-8') as deff: | ||||
|     defs = json.load(deff) | ||||
| with io.open(PARAMETERS_FILE, encoding='utf-8') as pf: | ||||
|     parameters = json.load(pf) | ||||
|  | ||||
|  | ||||
| class TestDownload(unittest.TestCase): | ||||
|     def setUp(self): | ||||
|         self.parameters = parameters | ||||
|         self.defs = defs | ||||
|  | ||||
| ### Dynamically generate tests | ||||
| def generator(test_case): | ||||
|  | ||||
|     def test_template(self): | ||||
|         ie = getattr(youtube_dl.InfoExtractors, test_case['name'] + 'IE') | ||||
|         if not ie._WORKING: | ||||
|             print('Skipping: IE marked as not _WORKING') | ||||
|             return | ||||
|         if 'playlist' not in test_case and not test_case['file']: | ||||
|             print('Skipping: No output file specified') | ||||
|             return | ||||
|         if 'skip' in test_case: | ||||
|             print('Skipping: {0}'.format(test_case['skip'])) | ||||
|             return | ||||
|  | ||||
|         params = self.parameters.copy() | ||||
|         params.update(test_case.get('params', {})) | ||||
|  | ||||
|         fd = FileDownloader(params) | ||||
|         fd.add_info_extractor(ie()) | ||||
|         for ien in test_case.get('add_ie', []): | ||||
|             fd.add_info_extractor(getattr(youtube_dl.InfoExtractors, ien + 'IE')()) | ||||
|         finished_hook_called = set() | ||||
|         def _hook(status): | ||||
|             if status['status'] == 'finished': | ||||
|                 finished_hook_called.add(status['filename']) | ||||
|         fd.add_progress_hook(_hook) | ||||
|  | ||||
|         test_cases = test_case.get('playlist', [test_case]) | ||||
|         for tc in test_cases: | ||||
|             _try_rm(tc['file']) | ||||
|             _try_rm(tc['file'] + '.part') | ||||
|             _try_rm(tc['file'] + '.info.json') | ||||
|         try: | ||||
|             fd.download([test_case['url']]) | ||||
|  | ||||
|             for tc in test_cases: | ||||
|                 if not test_case.get('params', {}).get('skip_download', False): | ||||
|                     self.assertTrue(os.path.exists(tc['file']), msg='Missing file ' + tc['file']) | ||||
|                     self.assertTrue(tc['file'] in finished_hook_called) | ||||
|                 self.assertTrue(os.path.exists(tc['file'] + '.info.json')) | ||||
|                 if 'md5' in tc: | ||||
|                     md5_for_file = _file_md5(tc['file']) | ||||
|                     self.assertEqual(md5_for_file, tc['md5']) | ||||
|                 with io.open(tc['file'] + '.info.json', encoding='utf-8') as infof: | ||||
|                     info_dict = json.load(infof) | ||||
|                 for (info_field, value) in tc.get('info_dict', {}).items(): | ||||
|                     self.assertEqual(value, info_dict.get(info_field)) | ||||
|         finally: | ||||
|             for tc in test_cases: | ||||
|                 _try_rm(tc['file']) | ||||
|                 _try_rm(tc['file'] + '.part') | ||||
|                 _try_rm(tc['file'] + '.info.json') | ||||
|  | ||||
|     return test_template | ||||
|  | ||||
| ### And add them to TestDownload | ||||
| for test_case in defs: | ||||
|     test_method = generator(test_case) | ||||
|     test_method.__name__ = "test_{0}".format(test_case["name"]) | ||||
|     setattr(TestDownload, test_method.__name__, test_method) | ||||
|     del test_method | ||||
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
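test_download.py builds its test methods at import time: every entry in tests.json becomes a test_<name> method on TestDownload via generator(). A minimal, self-contained sketch of that pattern (illustrative names only, not part of this changeset):

    import unittest

    CASES = [
        {'name': 'Foo', 'value': 1},
        {'name': 'Bar', 'value': 2},
    ]

    class TestGenerated(unittest.TestCase):
        pass

    def make_test(case):
        def test_template(self):
            # Stand-in for the real per-site download checks above
            self.assertTrue('value' in case)
        return test_template

    for case in CASES:
        method = make_test(case)
        method.__name__ = 'test_{0}'.format(case['name'])
        setattr(TestGenerated, method.__name__, method)
        del method

    if __name__ == '__main__':
        unittest.main()

Because the methods exist on the class after import, a single site can presumably be exercised on its own, e.g. python -m unittest test_download.TestDownload.test_Youtube from the test directory.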
							
								
								
									
26  test/test_execution.py  Normal file
							| @@ -0,0 +1,26 @@ | ||||
| import unittest | ||||
|  | ||||
| import sys | ||||
| import os | ||||
| import subprocess | ||||
|  | ||||
| rootDir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) | ||||
|  | ||||
| try: | ||||
|     _DEV_NULL = subprocess.DEVNULL | ||||
| except AttributeError: | ||||
|     _DEV_NULL = open(os.devnull, 'wb') | ||||
|  | ||||
| class TestExecution(unittest.TestCase): | ||||
|     def test_import(self): | ||||
|         subprocess.check_call([sys.executable, '-c', 'import youtube_dl'], cwd=rootDir) | ||||
|  | ||||
|     def test_module_exec(self): | ||||
|         if sys.version_info >= (2,7): # Python 2.6 doesn't support package execution | ||||
|             subprocess.check_call([sys.executable, '-m', 'youtube_dl', '--version'], cwd=rootDir, stdout=_DEV_NULL) | ||||
|  | ||||
|     def test_main_exec(self): | ||||
|         subprocess.check_call([sys.executable, 'youtube_dl/__main__.py', '--version'], cwd=rootDir, stdout=_DEV_NULL) | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
							
								
								
									
100  test/test_utils.py  Normal file
							| @@ -0,0 +1,100 @@ | ||||
| #!/usr/bin/env python | ||||
|  | ||||
| # Various small unit tests | ||||
|  | ||||
| import sys | ||||
| import unittest | ||||
|  | ||||
| # Allow direct execution | ||||
| import os | ||||
| sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) | ||||
|  | ||||
| #from youtube_dl.utils import htmlentity_transform | ||||
| from youtube_dl.utils import timeconvert | ||||
| from youtube_dl.utils import sanitize_filename | ||||
| from youtube_dl.utils import unescapeHTML | ||||
| from youtube_dl.utils import orderedSet | ||||
|  | ||||
| if sys.version_info < (3, 0): | ||||
|     _compat_str = lambda b: b.decode('unicode-escape') | ||||
| else: | ||||
|     _compat_str = lambda s: s | ||||
|  | ||||
|  | ||||
| class TestUtil(unittest.TestCase): | ||||
|     def test_timeconvert(self): | ||||
|         self.assertTrue(timeconvert('') is None) | ||||
|         self.assertTrue(timeconvert('bougrg') is None) | ||||
|  | ||||
|     def test_sanitize_filename(self): | ||||
|         self.assertEqual(sanitize_filename('abc'), 'abc') | ||||
|         self.assertEqual(sanitize_filename('abc_d-e'), 'abc_d-e') | ||||
|  | ||||
|         self.assertEqual(sanitize_filename('123'), '123') | ||||
|  | ||||
|         self.assertEqual('abc_de', sanitize_filename('abc/de')) | ||||
|         self.assertFalse('/' in sanitize_filename('abc/de///')) | ||||
|  | ||||
|         self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de')) | ||||
|         self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|')) | ||||
|         self.assertEqual('yes no', sanitize_filename('yes? no')) | ||||
|         self.assertEqual('this - that', sanitize_filename('this: that')) | ||||
|  | ||||
|         self.assertEqual(sanitize_filename('AT&T'), 'AT&T') | ||||
|         aumlaut = _compat_str('\xe4') | ||||
|         self.assertEqual(sanitize_filename(aumlaut), aumlaut) | ||||
|         tests = _compat_str('\u043a\u0438\u0440\u0438\u043b\u043b\u0438\u0446\u0430') | ||||
|         self.assertEqual(sanitize_filename(tests), tests) | ||||
|  | ||||
|         forbidden = '"\0\\/' | ||||
|         for fc in forbidden: | ||||
|             for fbc in forbidden: | ||||
|                 self.assertTrue(fbc not in sanitize_filename(fc)) | ||||
|  | ||||
|     def test_sanitize_filename_restricted(self): | ||||
|         self.assertEqual(sanitize_filename('abc', restricted=True), 'abc') | ||||
|         self.assertEqual(sanitize_filename('abc_d-e', restricted=True), 'abc_d-e') | ||||
|  | ||||
|         self.assertEqual(sanitize_filename('123', restricted=True), '123') | ||||
|  | ||||
|         self.assertEqual('abc_de', sanitize_filename('abc/de', restricted=True)) | ||||
|         self.assertFalse('/' in sanitize_filename('abc/de///', restricted=True)) | ||||
|  | ||||
|         self.assertEqual('abc_de', sanitize_filename('abc/<>\\*|de', restricted=True)) | ||||
|         self.assertEqual('xxx', sanitize_filename('xxx/<>\\*|', restricted=True)) | ||||
|         self.assertEqual('yes_no', sanitize_filename('yes? no', restricted=True)) | ||||
|         self.assertEqual('this_-_that', sanitize_filename('this: that', restricted=True)) | ||||
|  | ||||
|         tests = _compat_str('a\xe4b\u4e2d\u56fd\u7684c') | ||||
|         self.assertEqual(sanitize_filename(tests, restricted=True), 'a_b_c') | ||||
|         self.assertTrue(sanitize_filename(_compat_str('\xf6'), restricted=True) != '')  # No empty filename | ||||
|  | ||||
|         forbidden = '"\0\\/&!: \'\t\n()[]{}$;`^,#' | ||||
|         for fc in forbidden: | ||||
|             for fbc in forbidden: | ||||
|                 self.assertTrue(fbc not in sanitize_filename(fc, restricted=True)) | ||||
|  | ||||
|         # Handle a common case more neatly | ||||
|         self.assertEqual(sanitize_filename(_compat_str('\u5927\u58f0\u5e26 - Song'), restricted=True), 'Song') | ||||
|         self.assertEqual(sanitize_filename(_compat_str('\u603b\u7edf: Speech'), restricted=True), 'Speech') | ||||
|         # .. but make sure the file name is never empty | ||||
|         self.assertTrue(sanitize_filename('-', restricted=True) != '') | ||||
|         self.assertTrue(sanitize_filename(':', restricted=True) != '') | ||||
|  | ||||
|     def test_sanitize_ids(self): | ||||
|         self.assertEqual(sanitize_filename('_n_cd26wFpw', is_id=True), '_n_cd26wFpw') | ||||
|         self.assertEqual(sanitize_filename('_BD_eEpuzXw', is_id=True), '_BD_eEpuzXw') | ||||
|         self.assertEqual(sanitize_filename('N0Y__7-UOdI', is_id=True), 'N0Y__7-UOdI') | ||||
|  | ||||
|     def test_ordered_set(self): | ||||
|         self.assertEqual(orderedSet([1, 1, 2, 3, 4, 4, 5, 6, 7, 3, 5]), [1, 2, 3, 4, 5, 6, 7]) | ||||
|         self.assertEqual(orderedSet([]), []) | ||||
|         self.assertEqual(orderedSet([1]), [1]) | ||||
|         #keep the list ordered | ||||
|         self.assertEqual(orderedSet([135, 1, 1, 1]), [135, 1]) | ||||
|  | ||||
|     def test_unescape_html(self): | ||||
|         self.assertEqual(unescapeHTML(_compat_str('%20;')), _compat_str('%20;')) | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
							
								
								
									
77  test/test_write_info_json.py  Normal file
							| @@ -0,0 +1,77 @@ | ||||
| #!/usr/bin/env python | ||||
| # coding: utf-8 | ||||
|  | ||||
| import json | ||||
| import os | ||||
| import sys | ||||
| import unittest | ||||
|  | ||||
| # Allow direct execution | ||||
| sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) | ||||
|  | ||||
| import youtube_dl.FileDownloader | ||||
| import youtube_dl.InfoExtractors | ||||
| from youtube_dl.utils import * | ||||
|  | ||||
| PARAMETERS_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "parameters.json") | ||||
|  | ||||
| # General configuration (from __init__, not very elegant...) | ||||
| jar = compat_cookiejar.CookieJar() | ||||
| cookie_processor = compat_urllib_request.HTTPCookieProcessor(jar) | ||||
| proxy_handler = compat_urllib_request.ProxyHandler() | ||||
| opener = compat_urllib_request.build_opener(proxy_handler, cookie_processor, YoutubeDLHandler()) | ||||
| compat_urllib_request.install_opener(opener) | ||||
|  | ||||
| class FileDownloader(youtube_dl.FileDownloader): | ||||
|     def __init__(self, *args, **kwargs): | ||||
|         youtube_dl.FileDownloader.__init__(self, *args, **kwargs) | ||||
|         self.to_stderr = self.to_screen | ||||
|  | ||||
| with io.open(PARAMETERS_FILE, encoding='utf-8') as pf: | ||||
|     params = json.load(pf) | ||||
| params['writeinfojson'] = True | ||||
| params['skip_download'] = True | ||||
| params['writedescription'] = True | ||||
|  | ||||
| TEST_ID = 'BaW_jenozKc' | ||||
| INFO_JSON_FILE = TEST_ID + '.mp4.info.json' | ||||
| DESCRIPTION_FILE = TEST_ID + '.mp4.description' | ||||
| EXPECTED_DESCRIPTION = u'''test chars:  "'/\ä↭𝕐 | ||||
|  | ||||
| This is a test video for youtube-dl. | ||||
|  | ||||
| For more information, contact phihag@phihag.de .''' | ||||
|  | ||||
| class TestInfoJSON(unittest.TestCase): | ||||
|     def setUp(self): | ||||
|         # Clear old files | ||||
|         self.tearDown() | ||||
|  | ||||
|     def test_info_json(self): | ||||
|         ie = youtube_dl.InfoExtractors.YoutubeIE() | ||||
|         fd = FileDownloader(params) | ||||
|         fd.add_info_extractor(ie) | ||||
|         fd.download([TEST_ID]) | ||||
|         self.assertTrue(os.path.exists(INFO_JSON_FILE)) | ||||
|         with io.open(INFO_JSON_FILE, 'r', encoding='utf-8') as jsonf: | ||||
|             jd = json.load(jsonf) | ||||
|         self.assertEqual(jd['upload_date'], u'20121002') | ||||
|         self.assertEqual(jd['description'], EXPECTED_DESCRIPTION) | ||||
|         self.assertEqual(jd['id'], TEST_ID) | ||||
|         self.assertEqual(jd['extractor'], 'youtube') | ||||
|         self.assertEqual(jd['title'], u'''youtube-dl test video "'/\ä↭𝕐''') | ||||
|         self.assertEqual(jd['uploader'], 'Philipp Hagemeister') | ||||
|  | ||||
|         self.assertTrue(os.path.exists(DESCRIPTION_FILE)) | ||||
|         with io.open(DESCRIPTION_FILE, 'r', encoding='utf-8') as descf: | ||||
|             descr = descf.read() | ||||
|         self.assertEqual(descr, EXPECTED_DESCRIPTION) | ||||
|  | ||||
|     def tearDown(self): | ||||
|         if os.path.exists(INFO_JSON_FILE): | ||||
|             os.remove(INFO_JSON_FILE) | ||||
|         if os.path.exists(DESCRIPTION_FILE): | ||||
|             os.remove(DESCRIPTION_FILE) | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
							
								
								
									
73  test/test_youtube_lists.py  Normal file
							| @@ -0,0 +1,73 @@ | ||||
| #!/usr/bin/env python | ||||
|  | ||||
| import sys | ||||
| import unittest | ||||
| import json | ||||
|  | ||||
| # Allow direct execution | ||||
| import os | ||||
| sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) | ||||
|  | ||||
| from youtube_dl.InfoExtractors import YoutubeUserIE,YoutubePlaylistIE | ||||
| from youtube_dl.utils import * | ||||
|  | ||||
| PARAMETERS_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "parameters.json") | ||||
| with io.open(PARAMETERS_FILE, encoding='utf-8') as pf: | ||||
|     parameters = json.load(pf) | ||||
|  | ||||
| # General configuration (from __init__, not very elegant...) | ||||
| jar = compat_cookiejar.CookieJar() | ||||
| cookie_processor = compat_urllib_request.HTTPCookieProcessor(jar) | ||||
| proxy_handler = compat_urllib_request.ProxyHandler() | ||||
| opener = compat_urllib_request.build_opener(proxy_handler, cookie_processor, YoutubeDLHandler()) | ||||
| compat_urllib_request.install_opener(opener) | ||||
|  | ||||
| class FakeDownloader(object): | ||||
|     def __init__(self): | ||||
|         self.result = [] | ||||
|         self.params = parameters | ||||
|     def to_screen(self, s): | ||||
|         print(s) | ||||
|     def trouble(self, s): | ||||
|         raise Exception(s) | ||||
|     def download(self, x): | ||||
|         self.result.append(x) | ||||
|  | ||||
| class TestYoutubeLists(unittest.TestCase): | ||||
|     def test_youtube_playlist(self): | ||||
|         DL = FakeDownloader() | ||||
|         IE = YoutubePlaylistIE(DL) | ||||
|         IE.extract('https://www.youtube.com/playlist?list=PLwiyx1dc3P2JR9N8gQaQN_BCvlSlap7re') | ||||
|         self.assertEqual(DL.result, [ | ||||
|             ['http://www.youtube.com/watch?v=bV9L5Ht9LgY'], | ||||
|             ['http://www.youtube.com/watch?v=FXxLjLQi3Fg'], | ||||
|             ['http://www.youtube.com/watch?v=tU3Bgo5qJZE'] | ||||
|         ]) | ||||
|  | ||||
|     def test_youtube_playlist_long(self): | ||||
|         DL = FakeDownloader() | ||||
|         IE = YoutubePlaylistIE(DL) | ||||
|         IE.extract('https://www.youtube.com/playlist?list=UUBABnxM4Ar9ten8Mdjj1j0Q') | ||||
|         self.assertTrue(len(DL.result) >= 799) | ||||
|  | ||||
|     def test_youtube_course(self): | ||||
|         DL = FakeDownloader() | ||||
|         IE = YoutubePlaylistIE(DL) | ||||
|         # TODO find a > 100 (paginating?) videos course | ||||
|         IE.extract('https://www.youtube.com/course?list=ECUl4u3cNGP61MdtwGTqZA0MreSaDybji8') | ||||
|         self.assertEqual(DL.result[0], ['http://www.youtube.com/watch?v=j9WZyLZCBzs']) | ||||
|         self.assertEqual(len(DL.result), 25) | ||||
|         self.assertEqual(DL.result[-1], ['http://www.youtube.com/watch?v=rYefUsYuEp0']) | ||||
|  | ||||
|     def test_youtube_channel(self): | ||||
|         # I give up, please find a channel that does paginate and test this like test_youtube_playlist_long | ||||
|         pass # TODO | ||||
|  | ||||
|     def test_youtube_user(self): | ||||
|         DL = FakeDownloader() | ||||
|         IE = YoutubeUserIE(DL) | ||||
|         IE.extract('https://www.youtube.com/user/TheLinuxFoundation') | ||||
|         self.assertTrue(len(DL.result) >= 320) | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
							
								
								
									
57  test/test_youtube_subtitles.py  Normal file
							| @@ -0,0 +1,57 @@ | ||||
| #!/usr/bin/env python | ||||
|  | ||||
| import sys | ||||
| import unittest | ||||
| import json | ||||
| import io | ||||
| import hashlib | ||||
|  | ||||
| # Allow direct execution | ||||
| import os | ||||
| sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) | ||||
|  | ||||
| from youtube_dl.InfoExtractors import YoutubeIE | ||||
| from youtube_dl.utils import * | ||||
|  | ||||
| PARAMETERS_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "parameters.json") | ||||
| with io.open(PARAMETERS_FILE, encoding='utf-8') as pf: | ||||
|     parameters = json.load(pf) | ||||
|  | ||||
| # General configuration (from __init__, not very elegant...) | ||||
| jar = compat_cookiejar.CookieJar() | ||||
| cookie_processor = compat_urllib_request.HTTPCookieProcessor(jar) | ||||
| proxy_handler = compat_urllib_request.ProxyHandler() | ||||
| opener = compat_urllib_request.build_opener(proxy_handler, cookie_processor, YoutubeDLHandler()) | ||||
| compat_urllib_request.install_opener(opener) | ||||
|  | ||||
| class FakeDownloader(object): | ||||
|     def __init__(self): | ||||
|         self.result = [] | ||||
|         self.params = parameters | ||||
|     def to_screen(self, s): | ||||
|         print(s) | ||||
|     def trouble(self, s): | ||||
|         raise Exception(s) | ||||
|     def download(self, x): | ||||
|         self.result.append(x) | ||||
|  | ||||
| md5 = lambda s: hashlib.md5(s.encode('utf-8')).hexdigest() | ||||
|  | ||||
| class TestYoutubeSubtitles(unittest.TestCase): | ||||
|     def test_youtube_subtitles(self): | ||||
|         DL = FakeDownloader() | ||||
|         DL.params['writesubtitles'] = True | ||||
|         IE = YoutubeIE(DL) | ||||
|         info_dict = IE.extract('QRS8MkLhQmM') | ||||
|         self.assertEqual(md5(info_dict[0]['subtitles']), 'c3228550d59116f3c29fba370b55d033') | ||||
|  | ||||
|     def test_youtube_subtitles_it(self): | ||||
|         DL = FakeDownloader() | ||||
|         DL.params['writesubtitles'] = True | ||||
|         DL.params['subtitleslang'] = 'it' | ||||
|         IE = YoutubeIE(DL) | ||||
|         info_dict = IE.extract('QRS8MkLhQmM') | ||||
|         self.assertEqual(md5(info_dict[0]['subtitles']), '132a88a0daf8e1520f393eb58f1f646a') | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     unittest.main() | ||||
							
								
								
									
279  test/tests.json  Normal file
							| @@ -0,0 +1,279 @@ | ||||
| [ | ||||
|   { | ||||
|     "name": "Youtube", | ||||
|     "url":  "http://www.youtube.com/watch?v=BaW_jenozKc", | ||||
|     "file":  "BaW_jenozKc.mp4", | ||||
|     "info_dict": { | ||||
|       "title": "youtube-dl test video \"'/\\ä↭𝕐", | ||||
|       "uploader": "Philipp Hagemeister", | ||||
|       "uploader_id": "phihag", | ||||
|       "upload_date": "20121002", | ||||
|       "description": "test chars:  \"'/\\ä↭𝕐\n\nThis is a test video for youtube-dl.\n\nFor more information, contact phihag@phihag.de ." | ||||
|     } | ||||
|   }, | ||||
|   { | ||||
|     "name": "Dailymotion", | ||||
|     "md5":  "392c4b85a60a90dc4792da41ce3144eb", | ||||
|     "url":  "http://www.dailymotion.com/video/x33vw9_tutoriel-de-youtubeur-dl-des-video_tech", | ||||
|     "file":  "x33vw9.mp4" | ||||
|   }, | ||||
|   { | ||||
|     "name": "Metacafe", | ||||
|     "add_ie": ["Youtube"], | ||||
|     "url":  "http://metacafe.com/watch/yt-_aUehQsCQtM/the_electric_company_short_i_pbs_kids_go/", | ||||
|     "file":  "_aUehQsCQtM.flv" | ||||
|   }, | ||||
|   { | ||||
|     "name": "BlipTV", | ||||
|     "md5":  "b2d849efcf7ee18917e4b4d9ff37cafe", | ||||
|     "url":  "http://blip.tv/cbr/cbr-exclusive-gotham-city-imposters-bats-vs-jokerz-short-3-5796352", | ||||
|     "file":  "5779306.m4v" | ||||
|   }, | ||||
|   { | ||||
|     "name": "XVideos", | ||||
|     "md5":  "1d0c835822f0a71a7bf011855db929d0", | ||||
|     "url":  "http://www.xvideos.com/video939581/funny_porns_by_s_-1", | ||||
|     "file":  "939581.flv" | ||||
|   }, | ||||
|   { | ||||
|     "name": "YouPorn", | ||||
|     "md5": "c37ddbaaa39058c76a7e86c6813423c1", | ||||
|     "url": "http://www.youporn.com/watch/505835/sex-ed-is-it-safe-to-masturbate-daily/", | ||||
|     "file": "505835.mp4" | ||||
|   }, | ||||
|   { | ||||
|     "name": "Pornotube", | ||||
|     "md5": "374dd6dcedd24234453b295209aa69b6", | ||||
|     "url": "http://pornotube.com/c/173/m/1689755/Marilyn-Monroe-Bathing", | ||||
|     "file": "1689755.flv" | ||||
|   }, | ||||
|   { | ||||
|     "name": "YouJizz", | ||||
|     "md5": "07e15fa469ba384c7693fd246905547c", | ||||
|     "url": "http://www.youjizz.com/videos/zeichentrick-1-2189178.html", | ||||
|     "file": "2189178.flv" | ||||
|   }, | ||||
|   { | ||||
|     "name": "Vimeo", | ||||
|     "md5":  "8879b6cc097e987f02484baf890129e5", | ||||
|     "url":  "http://vimeo.com/56015672", | ||||
|     "file": "56015672.mp4", | ||||
|     "info_dict": { | ||||
|       "title": "youtube-dl test video - ★ \" ' 幸 / \\ ä ↭ 𝕐", | ||||
|       "uploader": "Filippo Valsorda", | ||||
|       "uploader_id": "user7108434", | ||||
|       "upload_date": "20121220", | ||||
|       "description": "This is a test case for youtube-dl.\nFor more information, see github.com/rg3/youtube-dl\nTest chars: ★ \" ' 幸 / \\ ä ↭ 𝕐" | ||||
|     } | ||||
|   }, | ||||
|   { | ||||
|     "name": "Soundcloud", | ||||
|     "md5":  "ebef0a451b909710ed1d7787dddbf0d7", | ||||
|     "url":  "http://soundcloud.com/ethmusic/lostin-powers-she-so-heavy", | ||||
|     "file":  "62986583.mp3" | ||||
|   }, | ||||
|   { | ||||
|     "name": "StanfordOpenClassroom", | ||||
|     "md5":  "544a9468546059d4e80d76265b0443b8", | ||||
|     "url":  "http://openclassroom.stanford.edu/MainFolder/VideoPage.php?course=PracticalUnix&video=intro-environment&speed=100", | ||||
|     "file":  "PracticalUnix_intro-environment.mp4", | ||||
|     "skip": "Currently offline" | ||||
|   }, | ||||
|   { | ||||
|     "name": "XNXX", | ||||
|     "md5":  "0831677e2b4761795f68d417e0b7b445", | ||||
|     "url":  "http://video.xnxx.com/video1135332/lida_naked_funny_actress_5_", | ||||
|     "file":  "1135332.flv" | ||||
|   }, | ||||
|   { | ||||
|     "name": "Youku", | ||||
|     "url": "http://v.youku.com/v_show/id_XNDgyMDQ2NTQw.html", | ||||
|     "file": "XNDgyMDQ2NTQw_part00.flv", | ||||
|     "md5": "ffe3f2e435663dc2d1eea34faeff5b5b", | ||||
|     "params": { "test": false } | ||||
|   }, | ||||
|   { | ||||
|     "name": "NBA", | ||||
|     "url": "http://www.nba.com/video/games/nets/2012/12/04/0021200253-okc-bkn-recap.nba/index.html", | ||||
|     "file": "0021200253-okc-bkn-recap.nba.mp4", | ||||
|     "md5": "c0edcfc37607344e2ff8f13c378c88a4" | ||||
|   }, | ||||
|   { | ||||
|     "name": "JustinTV", | ||||
|     "url": "http://www.twitch.tv/thegamedevhub/b/296128360", | ||||
|     "file": "296128360.flv", | ||||
|     "md5": "ecaa8a790c22a40770901460af191c9a" | ||||
|   }, | ||||
|   { | ||||
|     "name": "MyVideo", | ||||
|     "url": "http://www.myvideo.de/watch/8229274/bowling_fail_or_win", | ||||
|     "file": "8229274.flv", | ||||
|     "md5": "2d2753e8130479ba2cb7e0a37002053e" | ||||
|   }, | ||||
|   { | ||||
|     "name": "Escapist", | ||||
|     "url": "http://www.escapistmagazine.com/videos/view/the-escapist-presents/6618-Breaking-Down-Baldurs-Gate", | ||||
|     "file": "6618-Breaking-Down-Baldurs-Gate.flv", | ||||
|     "md5": "c6793dbda81388f4264c1ba18684a74d" | ||||
|   }, | ||||
|   { | ||||
|     "name": "GooglePlus", | ||||
|     "url": "https://plus.google.com/u/0/108897254135232129896/posts/ZButuJc6CtH", | ||||
|     "file": "ZButuJc6CtH.flv" | ||||
|   }, | ||||
|   { | ||||
|     "name": "FunnyOrDie", | ||||
|     "url": "http://www.funnyordie.com/videos/0732f586d7/heart-shaped-box-literal-video-version", | ||||
|     "file": "0732f586d7.mp4", | ||||
|     "md5": "f647e9e90064b53b6e046e75d0241fbd" | ||||
|   }, | ||||
|   { | ||||
|     "name": "TweetReel", | ||||
|     "url": "http://tweetreel.com/?77smq", | ||||
|     "file": "77smq.mov", | ||||
|     "md5": "56b4d9ca9de467920f3f99a6d91255d6", | ||||
|     "info_dict": { | ||||
|         "uploader": "itszero", | ||||
|         "uploader_id": "itszero", | ||||
|         "upload_date": "20091225", | ||||
|         "description": "Installing Gentoo Linux on Powerbook G4, it turns out the sleep indicator becomes HDD activity indicator :D" | ||||
|     } | ||||
|   }, | ||||
|   { | ||||
|     "name": "Steam", | ||||
|     "url": "http://store.steampowered.com/video/105600/", | ||||
|     "playlist": [ | ||||
|       { | ||||
|         "file": "81300.flv", | ||||
|         "md5": "f870007cee7065d7c76b88f0a45ecc07", | ||||
|         "info_dict": { | ||||
|             "title": "Terraria 1.1 Trailer" | ||||
|         } | ||||
|       }, | ||||
|       { | ||||
|         "file": "80859.flv", | ||||
|         "md5": "61aaf31a5c5c3041afb58fb83cbb5751", | ||||
|         "info_dict": { | ||||
|           "title": "Terraria Trailer" | ||||
|         } | ||||
|       } | ||||
|     ] | ||||
|   }, | ||||
|   { | ||||
|     "name": "Ustream", | ||||
|     "url": "http://www.ustream.tv/recorded/20274954", | ||||
|     "file": "20274954.flv", | ||||
|     "md5": "088f151799e8f572f84eb62f17d73e5c", | ||||
|     "info_dict": { | ||||
|         "title": "Young Americans for Liberty February 7, 2012 2:28 AM" | ||||
|     } | ||||
|   }, | ||||
|   { | ||||
|     "name": "InfoQ", | ||||
|     "url": "http://www.infoq.com/presentations/A-Few-of-My-Favorite-Python-Things", | ||||
|     "file": "12-jan-pythonthings.mp4", | ||||
|     "info_dict": { | ||||
|       "title": "A Few of My Favorite [Python] Things" | ||||
|     }, | ||||
|     "params": { | ||||
|       "skip_download": true | ||||
|     } | ||||
|   }, | ||||
|   { | ||||
|     "name": "ComedyCentral", | ||||
|     "url": "http://www.thedailyshow.com/watch/thu-december-13-2012/kristen-stewart", | ||||
|     "file": "422212.mp4", | ||||
|     "md5": "4e2f5cb088a83cd8cdb7756132f9739d", | ||||
|     "info_dict": { | ||||
|         "title": "thedailyshow-kristen-stewart part 1" | ||||
|     } | ||||
|   }, | ||||
|   { | ||||
|     "name": "RBMARadio", | ||||
|     "url": "http://www.rbmaradio.com/shows/ford-lopatin-live-at-primavera-sound-2011", | ||||
|     "file": "ford-lopatin-live-at-primavera-sound-2011.mp3", | ||||
|     "md5": "6bc6f9bcb18994b4c983bc3bf4384d95", | ||||
|     "info_dict": { | ||||
|         "title": "Live at Primavera Sound 2011", | ||||
|         "description": "Joel Ford and Daniel \u2019Oneohtrix Point Never\u2019 Lopatin fly their midified pop extravaganza to Spain. Live at Primavera Sound 2011.", | ||||
|         "uploader": "Ford & Lopatin", | ||||
|         "uploader_id": "ford-lopatin", | ||||
|         "location": "Spain" | ||||
|     } | ||||
|   }, | ||||
|   { | ||||
|     "name": "Facebook", | ||||
|     "url": "https://www.facebook.com/photo.php?v=120708114770723", | ||||
|     "file": "120708114770723.mp4", | ||||
|     "md5": "48975a41ccc4b7a581abd68651c1a5a8", | ||||
|     "info_dict": { | ||||
|       "title": "PEOPLE ARE AWESOME 2013", | ||||
|       "duration": 279 | ||||
|     } | ||||
|   }, | ||||
|   { | ||||
|     "name": "EightTracks", | ||||
|     "url": "http://8tracks.com/ytdl/youtube-dl-test-tracks-a", | ||||
|     "playlist": [ | ||||
|       { | ||||
|         "file": "11885610.m4a", | ||||
|         "md5": "96ce57f24389fc8734ce47f4c1abcc55", | ||||
|         "info_dict": { | ||||
|           "title": "youtue-dl project<>\"' - youtube-dl test track 1 \"'/\\\u00e4\u21ad", | ||||
|           "uploader_id": "ytdl" | ||||
|         } | ||||
|       }, | ||||
|       { | ||||
|         "file": "11885608.m4a", | ||||
|         "md5": "4ab26f05c1f7291ea460a3920be8021f", | ||||
|         "info_dict": { | ||||
|           "title": "youtube-dl project - youtube-dl test track 2 \"'/\\\u00e4\u21ad", | ||||
|           "uploader_id": "ytdl" | ||||
|  | ||||
|         } | ||||
|       }, | ||||
|       { | ||||
|         "file": "11885679.m4a", | ||||
|         "md5": "d30b5b5f74217410f4689605c35d1fd7", | ||||
|         "info_dict": { | ||||
|           "title": "youtube-dl project as well - youtube-dl test track 3 \"'/\\\u00e4\u21ad" | ||||
|         } | ||||
|       }, | ||||
|       { | ||||
|         "file": "11885680.m4a", | ||||
|         "md5": "4eb0a669317cd725f6bbd336a29f923a", | ||||
|         "info_dict": { | ||||
|           "title": "youtube-dl project as well - youtube-dl test track 4 \"'/\\\u00e4\u21ad" | ||||
|         } | ||||
|       }, | ||||
|       { | ||||
|         "file": "11885682.m4a", | ||||
|         "md5": "1893e872e263a2705558d1d319ad19e8", | ||||
|         "info_dict": { | ||||
|           "title": "PH - youtube-dl test track 5 \"'/\\\u00e4\u21ad" | ||||
|         } | ||||
|       }, | ||||
|       { | ||||
|         "file": "11885683.m4a", | ||||
|         "md5": "b673c46f47a216ab1741ae8836af5899", | ||||
|         "info_dict": { | ||||
|           "title": "PH - youtube-dl test track 6 \"'/\\\u00e4\u21ad" | ||||
|         } | ||||
|       }, | ||||
|       { | ||||
|         "file": "11885684.m4a", | ||||
|         "md5": "1d74534e95df54986da7f5abf7d842b7", | ||||
|         "info_dict": { | ||||
|           "title": "phihag - youtube-dl test track 7 \"'/\\\u00e4\u21ad" | ||||
|         } | ||||
|       }, | ||||
|       { | ||||
|         "file": "11885685.m4a", | ||||
|         "md5": "f081f47af8f6ae782ed131d38b9cd1c0", | ||||
|         "info_dict": { | ||||
|           "title": "phihag - youtube-dl test track 8 \"'/\\\u00e4\u21ad" | ||||
|         } | ||||
|       } | ||||
|     ] | ||||
|   } | ||||
| ] | ||||
							
								
								
									
4561  youtube-dl  (file diff suppressed because it is too large)
							| @@ -1,6 +0,0 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| import youtube_dl | ||||
|  | ||||
| youtube_dl.main() | ||||
							
								
								
									
										
BIN  youtube-dl.exe  Normal file  (binary file not shown)
								
								
									
817  youtube_dl/FileDownloader.py  Normal file
							| @@ -0,0 +1,817 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| from __future__ import absolute_import | ||||
|  | ||||
| import math | ||||
| import io | ||||
| import os | ||||
| import re | ||||
| import socket | ||||
| import subprocess | ||||
| import sys | ||||
| import time | ||||
| import traceback | ||||
|  | ||||
| if os.name == 'nt': | ||||
|     import ctypes | ||||
|  | ||||
| from .utils import * | ||||
|  | ||||
|  | ||||
| class FileDownloader(object): | ||||
|     """File Downloader class. | ||||
|  | ||||
|     File downloader objects are the ones responsible of downloading the | ||||
|     actual video file and writing it to disk if the user has requested | ||||
|     it, among some other tasks. In most cases there should be one per | ||||
|     program. As, given a video URL, the downloader doesn't know how to | ||||
|     extract all the needed information, task that InfoExtractors do, it | ||||
|     has to pass the URL to one of them. | ||||
|  | ||||
|     For this, file downloader objects have a method that allows | ||||
|     InfoExtractors to be registered in a given order. When it is passed | ||||
|     a URL, the file downloader handles it to the first InfoExtractor it | ||||
|     finds that reports being able to handle it. The InfoExtractor extracts | ||||
|     all the information about the video or videos the URL refers to, and | ||||
|     asks the FileDownloader to process the video information, possibly | ||||
|     downloading the video. | ||||
|  | ||||
|     File downloaders accept a lot of parameters. In order not to saturate | ||||
|     the object constructor with arguments, it receives a dictionary of | ||||
|     options instead. These options are available through the params | ||||
|     attribute for the InfoExtractors to use. The FileDownloader also | ||||
|     registers itself as the downloader in charge for the InfoExtractors | ||||
|     that are added to it, so this is a "mutual registration". | ||||
|  | ||||
|     Available options: | ||||
|  | ||||
|     username:          Username for authentication purposes. | ||||
|     password:          Password for authentication purposes. | ||||
|     usenetrc:          Use netrc for authentication instead. | ||||
|     quiet:             Do not print messages to stdout. | ||||
|     forceurl:          Force printing final URL. | ||||
|     forcetitle:        Force printing title. | ||||
|     forcethumbnail:    Force printing thumbnail URL. | ||||
|     forcedescription:  Force printing description. | ||||
|     forcefilename:     Force printing final filename. | ||||
|     simulate:          Do not download the video files. | ||||
|     format:            Video format code. | ||||
|     format_limit:      Highest quality format to try. | ||||
|     outtmpl:           Template for output names. | ||||
|     restrictfilenames: Do not allow "&" and spaces in file names | ||||
|     ignoreerrors:      Do not stop on download errors. | ||||
|     ratelimit:         Download speed limit, in bytes/sec. | ||||
|     nooverwrites:      Prevent overwriting files. | ||||
|     retries:           Number of times to retry for HTTP error 5xx | ||||
|     buffersize:        Size of download buffer in bytes. | ||||
|     noresizebuffer:    Do not automatically resize the download buffer. | ||||
|     continuedl:        Try to continue downloads if possible. | ||||
|     noprogress:        Do not print the progress bar. | ||||
|     playliststart:     Playlist item to start at. | ||||
|     playlistend:       Playlist item to end at. | ||||
|     matchtitle:        Download only matching titles. | ||||
|     rejecttitle:       Reject downloads for matching titles. | ||||
|     logtostderr:       Log messages to stderr instead of stdout. | ||||
|     consoletitle:      Display progress in console window's titlebar. | ||||
|     nopart:            Do not use temporary .part files. | ||||
|     updatetime:        Use the Last-modified header to set output file timestamps. | ||||
|     writedescription:  Write the video description to a .description file | ||||
|     writeinfojson:     Write the video description to a .info.json file | ||||
|     writesubtitles:    Write the video subtitles to a .srt file | ||||
|     subtitleslang:     Language of the subtitles to download | ||||
|     test:              Download only first bytes to test the downloader. | ||||
|     keepvideo:         Keep the video file after post-processing | ||||
|     min_filesize:      Skip files smaller than this size | ||||
|     max_filesize:      Skip files larger than this size | ||||
|     """ | ||||
|  | ||||
|     params = None | ||||
|     _ies = [] | ||||
|     _pps = [] | ||||
|     _download_retcode = None | ||||
|     _num_downloads = None | ||||
|     _screen_file = None | ||||
|  | ||||
|     def __init__(self, params): | ||||
|         """Create a FileDownloader object with the given options.""" | ||||
|         self._ies = [] | ||||
|         self._pps = [] | ||||
|         self._progress_hooks = [] | ||||
|         self._download_retcode = 0 | ||||
|         self._num_downloads = 0 | ||||
|         self._screen_file = [sys.stdout, sys.stderr][params.get('logtostderr', False)] | ||||
|         self.params = params | ||||
|  | ||||
|         if '%(stitle)s' in self.params['outtmpl']: | ||||
|             self.to_stderr(u'WARNING: %(stitle)s is deprecated. Use the %(title)s and the --restrict-filenames flag (which also secures %(uploader)s et al) instead.') | ||||
|  | ||||
|     @staticmethod | ||||
|     def format_bytes(bytes): | ||||
|         if bytes is None: | ||||
|             return 'N/A' | ||||
|         if type(bytes) is str: | ||||
|             bytes = float(bytes) | ||||
|         if bytes == 0.0: | ||||
|             exponent = 0 | ||||
|         else: | ||||
|             exponent = int(math.log(bytes, 1024.0)) | ||||
|         suffix = 'bkMGTPEZY'[exponent] | ||||
|         converted = float(bytes) / float(1024 ** exponent) | ||||
|         return '%.2f%s' % (converted, suffix) | ||||
|  | ||||
|     @staticmethod | ||||
|     def calc_percent(byte_counter, data_len): | ||||
|         if data_len is None: | ||||
|             return '---.-%' | ||||
|         return '%6s' % ('%3.1f%%' % (float(byte_counter) / float(data_len) * 100.0)) | ||||
|  | ||||
|     @staticmethod | ||||
|     def calc_eta(start, now, total, current): | ||||
|         if total is None: | ||||
|             return '--:--' | ||||
|         dif = now - start | ||||
|         if current == 0 or dif < 0.001: # One millisecond | ||||
|             return '--:--' | ||||
|         rate = float(current) / dif | ||||
|         eta = int((float(total) - float(current)) / rate) | ||||
|         (eta_mins, eta_secs) = divmod(eta, 60) | ||||
|         if eta_mins > 99: | ||||
|             return '--:--' | ||||
|         return '%02d:%02d' % (eta_mins, eta_secs) | ||||
|  | ||||
|     @staticmethod | ||||
|     def calc_speed(start, now, bytes): | ||||
|         dif = now - start | ||||
|         if bytes == 0 or dif < 0.001: # One millisecond | ||||
|             return '%10s' % '---b/s' | ||||
|         return '%10s' % ('%s/s' % FileDownloader.format_bytes(float(bytes) / dif)) | ||||
|  | ||||
|     @staticmethod | ||||
|     def best_block_size(elapsed_time, bytes): | ||||
|         new_min = max(bytes / 2.0, 1.0) | ||||
|         new_max = min(max(bytes * 2.0, 1.0), 4194304) # Do not surpass 4 MB | ||||
|         if elapsed_time < 0.001: | ||||
|             return int(new_max) | ||||
|         rate = bytes / elapsed_time | ||||
|         if rate > new_max: | ||||
|             return int(new_max) | ||||
|         if rate < new_min: | ||||
|             return int(new_min) | ||||
|         return int(rate) | ||||
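|  | ||||
|     # Worked example (added for clarity, not in the original source): after a | ||||
|     # 1024-byte read that took 0.1 s the measured rate is 10240 B/s, which is | ||||
|     # capped at new_max = 2048, so the next block size doubles to 2048 bytes. | ||||
|     # The buffer keeps growing this way as long as the link keeps up (unless | ||||
|     # the 'noresizebuffer' option is set). | ||||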
|  | ||||
|     @staticmethod | ||||
|     def parse_bytes(bytestr): | ||||
|         """Parse a string indicating a byte quantity into an integer.""" | ||||
|         matchobj = re.match(r'(?i)^(\d+(?:\.\d+)?)([kMGTPEZY]?)$', bytestr) | ||||
|         if matchobj is None: | ||||
|             return None | ||||
|         number = float(matchobj.group(1)) | ||||
|         multiplier = 1024.0 ** 'bkmgtpezy'.index(matchobj.group(2).lower()) | ||||
|         return int(round(number * multiplier)) | ||||
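|  | ||||
|     # Worked examples (added for clarity, not in the original source): | ||||
|     #   format_bytes(1048576)  -> exponent 2, suffix 'M', result '1.00M' | ||||
|     #   parse_bytes('50k')     -> 'k' has index 1, so 50 * 1024.0 ** 1 = 51200 | ||||
|     #   parse_bytes('1.5m')    -> 1.5 * 1024.0 ** 2 = 1572864 | ||||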
|  | ||||
|     def add_info_extractor(self, ie): | ||||
|         """Add an InfoExtractor object to the end of the list.""" | ||||
|         self._ies.append(ie) | ||||
|         ie.set_downloader(self) | ||||
|  | ||||
|     def add_post_processor(self, pp): | ||||
|         """Add a PostProcessor object to the end of the chain.""" | ||||
|         self._pps.append(pp) | ||||
|         pp.set_downloader(self) | ||||
|  | ||||
|     def to_screen(self, message, skip_eol=False): | ||||
|         """Print message to stdout if not in quiet mode.""" | ||||
|         assert type(message) == type(u'') | ||||
|         if not self.params.get('quiet', False): | ||||
|             terminator = [u'\n', u''][skip_eol] | ||||
|             output = message + terminator | ||||
|             if 'b' in getattr(self._screen_file, 'mode', '') or sys.version_info[0] < 3: # Python 2 lies about the mode of sys.stdout/sys.stderr | ||||
|                 output = output.encode(preferredencoding(), 'ignore') | ||||
|             self._screen_file.write(output) | ||||
|             self._screen_file.flush() | ||||
|  | ||||
|     def to_stderr(self, message): | ||||
|         """Print message to stderr.""" | ||||
|         assert type(message) == type(u'') | ||||
|         output = message + u'\n' | ||||
|         if 'b' in getattr(self._screen_file, 'mode', '') or sys.version_info[0] < 3: # Python 2 lies about the mode of sys.stdout/sys.stderr | ||||
|             output = output.encode(preferredencoding()) | ||||
|         sys.stderr.write(output) | ||||
|  | ||||
|     def to_cons_title(self, message): | ||||
|         """Set console/terminal window title to message.""" | ||||
|         if not self.params.get('consoletitle', False): | ||||
|             return | ||||
|         if os.name == 'nt' and ctypes.windll.kernel32.GetConsoleWindow(): | ||||
|             # c_wchar_p() might not be necessary if `message` is | ||||
|             # already of type unicode() | ||||
|             ctypes.windll.kernel32.SetConsoleTitleW(ctypes.c_wchar_p(message)) | ||||
|         elif 'TERM' in os.environ: | ||||
|             sys.stderr.write('\033]0;%s\007' % message.encode(preferredencoding())) | ||||
|  | ||||
|     def fixed_template(self): | ||||
|         """Checks if the output template is fixed.""" | ||||
|         return (re.search(u'(?u)%\\(.+?\\)s', self.params['outtmpl']) is None) | ||||
|  | ||||
|     def trouble(self, message=None, tb=None): | ||||
|         """Determine action to take when a download problem appears. | ||||
|  | ||||
|         Depending on whether the downloader has been configured to ignore | ||||
|         download errors or not, this method may raise an exception or simply | ||||
|         record a non-zero return code when errors are found, after printing | ||||
|         the message. | ||||
|  | ||||
|         tb, if given, is additional traceback information. | ||||
|         """ | ||||
|         if message is not None: | ||||
|             self.to_stderr(message) | ||||
|         if self.params.get('verbose'): | ||||
|             if tb is None: | ||||
|                 tb_data = traceback.format_list(traceback.extract_stack()) | ||||
|                 tb = u''.join(tb_data) | ||||
|             self.to_stderr(tb) | ||||
|         if not self.params.get('ignoreerrors', False): | ||||
|             raise DownloadError(message) | ||||
|         self._download_retcode = 1 | ||||
|  | ||||
|     def slow_down(self, start_time, byte_counter): | ||||
|         """Sleep if the download speed is over the rate limit.""" | ||||
|         rate_limit = self.params.get('ratelimit', None) | ||||
|         if rate_limit is None or byte_counter == 0: | ||||
|             return | ||||
|         now = time.time() | ||||
|         elapsed = now - start_time | ||||
|         if elapsed <= 0.0: | ||||
|             return | ||||
|         speed = float(byte_counter) / elapsed | ||||
|         if speed > rate_limit: | ||||
|             time.sleep((byte_counter - rate_limit * (now - start_time)) / rate_limit) | ||||
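|  | ||||
|     # Worked example (added for clarity, not in the original source): with | ||||
|     # ratelimit=1048576 (1 MiB/s), if 4194304 bytes arrived within 2.0 seconds | ||||
|     # the measured speed is 2 MiB/s, so slow_down() sleeps | ||||
|     # (4194304 - 1048576 * 2.0) / 1048576 = 2.0 seconds, which brings the | ||||
|     # average speed back down to the configured limit. | ||||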
|  | ||||
|     def temp_name(self, filename): | ||||
|         """Returns a temporary filename for the given filename.""" | ||||
|         if self.params.get('nopart', False) or filename == u'-' or \ | ||||
|                 (os.path.exists(encodeFilename(filename)) and not os.path.isfile(encodeFilename(filename))): | ||||
|             return filename | ||||
|         return filename + u'.part' | ||||
|  | ||||
|     def undo_temp_name(self, filename): | ||||
|         if filename.endswith(u'.part'): | ||||
|             return filename[:-len(u'.part')] | ||||
|         return filename | ||||
|  | ||||
|     def try_rename(self, old_filename, new_filename): | ||||
|         try: | ||||
|             if old_filename == new_filename: | ||||
|                 return | ||||
|             os.rename(encodeFilename(old_filename), encodeFilename(new_filename)) | ||||
|         except (IOError, OSError) as err: | ||||
|             self.trouble(u'ERROR: unable to rename file') | ||||
|  | ||||
|     def try_utime(self, filename, last_modified_hdr): | ||||
|         """Try to set the last-modified time of the given file.""" | ||||
|         if last_modified_hdr is None: | ||||
|             return | ||||
|         if not os.path.isfile(encodeFilename(filename)): | ||||
|             return | ||||
|         filetime = timeconvert(last_modified_hdr) | ||||
|         if filetime is None: | ||||
|             return filetime | ||||
|         try: | ||||
|             os.utime(filename, (time.time(), filetime)) | ||||
|         except: | ||||
|             pass | ||||
|         return filetime | ||||
|  | ||||
|     def report_writedescription(self, descfn): | ||||
|         """ Report that the description file is being written """ | ||||
|         self.to_screen(u'[info] Writing video description to: ' + descfn) | ||||
|  | ||||
|     def report_writesubtitles(self, srtfn): | ||||
|         """ Report that the subtitles file is being written """ | ||||
|         self.to_screen(u'[info] Writing video subtitles to: ' + srtfn) | ||||
|  | ||||
|     def report_writeinfojson(self, infofn): | ||||
|         """ Report that the metadata file is being written """ | ||||
|         self.to_screen(u'[info] Writing video description metadata as JSON to: ' + infofn) | ||||
|  | ||||
|     def report_destination(self, filename): | ||||
|         """Report destination filename.""" | ||||
|         self.to_screen(u'[download] Destination: ' + filename) | ||||
|  | ||||
|     def report_progress(self, percent_str, data_len_str, speed_str, eta_str): | ||||
|         """Report download progress.""" | ||||
|         if self.params.get('noprogress', False): | ||||
|             return | ||||
|         self.to_screen(u'\r[download] %s of %s at %s ETA %s' % | ||||
|                 (percent_str, data_len_str, speed_str, eta_str), skip_eol=True) | ||||
|         self.to_cons_title(u'youtube-dl - %s of %s at %s ETA %s' % | ||||
|                 (percent_str.strip(), data_len_str.strip(), speed_str.strip(), eta_str.strip())) | ||||
|  | ||||
|     def report_resuming_byte(self, resume_len): | ||||
|         """Report attempt to resume at given byte.""" | ||||
|         self.to_screen(u'[download] Resuming download at byte %s' % resume_len) | ||||
|  | ||||
|     def report_retry(self, count, retries): | ||||
|         """Report retry in case of HTTP error 5xx""" | ||||
|         self.to_screen(u'[download] Got server HTTP error. Retrying (attempt %d of %d)...' % (count, retries)) | ||||
|  | ||||
|     def report_file_already_downloaded(self, file_name): | ||||
|         """Report file has already been fully downloaded.""" | ||||
|         try: | ||||
|             self.to_screen(u'[download] %s has already been downloaded' % file_name) | ||||
|         except (UnicodeEncodeError) as err: | ||||
|             self.to_screen(u'[download] The file has already been downloaded') | ||||
|  | ||||
|     def report_unable_to_resume(self): | ||||
|         """Report it was impossible to resume download.""" | ||||
|         self.to_screen(u'[download] Unable to resume') | ||||
|  | ||||
|     def report_finish(self): | ||||
|         """Report download finished.""" | ||||
|         if self.params.get('noprogress', False): | ||||
|             self.to_screen(u'[download] Download completed') | ||||
|         else: | ||||
|             self.to_screen(u'') | ||||
|  | ||||
|     def increment_downloads(self): | ||||
|         """Increment the ordinal that assigns a number to each file.""" | ||||
|         self._num_downloads += 1 | ||||
|  | ||||
|     def prepare_filename(self, info_dict): | ||||
|         """Generate the output filename.""" | ||||
|         try: | ||||
|             template_dict = dict(info_dict) | ||||
|  | ||||
|             template_dict['epoch'] = int(time.time()) | ||||
|             template_dict['autonumber'] = u'%05d' % self._num_downloads | ||||
|  | ||||
|             sanitize = lambda k,v: sanitize_filename( | ||||
|                 u'NA' if v is None else compat_str(v), | ||||
|                 restricted=self.params.get('restrictfilenames'), | ||||
|                 is_id=(k==u'id')) | ||||
|             template_dict = dict((k, sanitize(k, v)) for k,v in template_dict.items()) | ||||
|  | ||||
|             filename = self.params['outtmpl'] % template_dict | ||||
|             return filename | ||||
|         except (ValueError, KeyError) as err: | ||||
|             self.trouble(u'ERROR: invalid system charset or erroneous output template') | ||||
|             return None | ||||
|  | ||||
|     def _match_entry(self, info_dict): | ||||
|         """ Returns None iff the file should be downloaded """ | ||||
|  | ||||
|         title = info_dict['title'] | ||||
|         matchtitle = self.params.get('matchtitle', False) | ||||
|         if matchtitle: | ||||
|             matchtitle = matchtitle.decode('utf8') | ||||
|             if not re.search(matchtitle, title, re.IGNORECASE): | ||||
|                 return u'"' + title + '" title did not match pattern "' + matchtitle + '"' | ||||
|         rejecttitle = self.params.get('rejecttitle', False) | ||||
|         if rejecttitle: | ||||
|             rejecttitle = rejecttitle.decode('utf8') | ||||
|             if re.search(rejecttitle, title, re.IGNORECASE): | ||||
|                 return u'"' + title + '" title matched reject pattern "' + rejecttitle + '"' | ||||
|         return None | ||||
|  | ||||
|     def process_info(self, info_dict): | ||||
|         """Process a single dictionary returned by an InfoExtractor.""" | ||||
|  | ||||
|         # Keep for backwards compatibility | ||||
|         info_dict['stitle'] = info_dict['title'] | ||||
|  | ||||
|         if not 'format' in info_dict: | ||||
|             info_dict['format'] = info_dict['ext'] | ||||
|  | ||||
|         reason = self._match_entry(info_dict) | ||||
|         if reason is not None: | ||||
|             self.to_screen(u'[download] ' + reason) | ||||
|             return | ||||
|  | ||||
|         max_downloads = self.params.get('max_downloads') | ||||
|         if max_downloads is not None: | ||||
|             if self._num_downloads > int(max_downloads): | ||||
|                 raise MaxDownloadsReached() | ||||
|  | ||||
|         filename = self.prepare_filename(info_dict) | ||||
|  | ||||
|         # Forced printings | ||||
|         if self.params.get('forcetitle', False): | ||||
|             compat_print(info_dict['title']) | ||||
|         if self.params.get('forceurl', False): | ||||
|             compat_print(info_dict['url']) | ||||
|         if self.params.get('forcethumbnail', False) and 'thumbnail' in info_dict: | ||||
|             compat_print(info_dict['thumbnail']) | ||||
|         if self.params.get('forcedescription', False) and 'description' in info_dict: | ||||
|             compat_print(info_dict['description']) | ||||
|         if self.params.get('forcefilename', False) and filename is not None: | ||||
|             compat_print(filename) | ||||
|         if self.params.get('forceformat', False): | ||||
|             compat_print(info_dict['format']) | ||||
|  | ||||
|         # Do nothing else if in simulate mode | ||||
|         if self.params.get('simulate', False): | ||||
|             return | ||||
|  | ||||
|         if filename is None: | ||||
|             return | ||||
|  | ||||
|         try: | ||||
|             dn = os.path.dirname(encodeFilename(filename)) | ||||
|             if dn != '' and not os.path.exists(dn): # dn is already encoded | ||||
|                 os.makedirs(dn) | ||||
|         except (OSError, IOError) as err: | ||||
|             self.trouble(u'ERROR: unable to create directory ' + compat_str(err)) | ||||
|             return | ||||
|  | ||||
|         if self.params.get('writedescription', False): | ||||
|             try: | ||||
|                 descfn = filename + u'.description' | ||||
|                 self.report_writedescription(descfn) | ||||
|                 with io.open(encodeFilename(descfn), 'w', encoding='utf-8') as descfile: | ||||
|                     descfile.write(info_dict['description']) | ||||
|             except (OSError, IOError): | ||||
|                 self.trouble(u'ERROR: Cannot write description file ' + descfn) | ||||
|                 return | ||||
|  | ||||
|         if self.params.get('writesubtitles', False) and 'subtitles' in info_dict and info_dict['subtitles']: | ||||
|             # subtitles download errors are already managed as troubles in the relevant IE | ||||
|             # that way it will silently go on when used with IEs that do not support subtitles | ||||
|             try: | ||||
|                 srtfn = filename.rsplit('.', 1)[0] + u'.srt' | ||||
|                 self.report_writesubtitles(srtfn) | ||||
|                 with io.open(encodeFilename(srtfn), 'w', encoding='utf-8') as srtfile: | ||||
|                     srtfile.write(info_dict['subtitles']) | ||||
|             except (OSError, IOError): | ||||
|                 self.trouble(u'ERROR: Cannot write subtitles file ' + srtfn) | ||||
|                 return | ||||
|  | ||||
|         if self.params.get('writeinfojson', False): | ||||
|             infofn = filename + u'.info.json' | ||||
|             self.report_writeinfojson(infofn) | ||||
|             try: | ||||
|                 json_info_dict = dict((k, v) for k,v in info_dict.items() if not k in ['urlhandle']) | ||||
|                 write_json_file(json_info_dict, encodeFilename(infofn)) | ||||
|             except (OSError, IOError): | ||||
|                 self.trouble(u'ERROR: Cannot write metadata to JSON file ' + infofn) | ||||
|                 return | ||||
|  | ||||
|         if not self.params.get('skip_download', False): | ||||
|             if self.params.get('nooverwrites', False) and os.path.exists(encodeFilename(filename)): | ||||
|                 success = True | ||||
|             else: | ||||
|                 try: | ||||
|                     success = self._do_download(filename, info_dict) | ||||
|                 except (OSError, IOError) as err: | ||||
|                     raise UnavailableVideoError() | ||||
|                 except (compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error) as err: | ||||
|                     self.trouble(u'ERROR: unable to download video data: %s' % str(err)) | ||||
|                     return | ||||
|                 except (ContentTooShortError, ) as err: | ||||
|                     self.trouble(u'ERROR: content too short (expected %s bytes and served %s)' % (err.expected, err.downloaded)) | ||||
|                     return | ||||
|  | ||||
|             if success: | ||||
|                 try: | ||||
|                     self.post_process(filename, info_dict) | ||||
|                 except (PostProcessingError) as err: | ||||
|                     self.trouble(u'ERROR: postprocessing: %s' % str(err)) | ||||
|                     return | ||||
|  | ||||
|     def download(self, url_list): | ||||
|         """Download a given list of URLs.""" | ||||
|         if len(url_list) > 1 and self.fixed_template(): | ||||
|             raise SameFileError(self.params['outtmpl']) | ||||
|  | ||||
|         for url in url_list: | ||||
|             suitable_found = False | ||||
|             for ie in self._ies: | ||||
|                 # Go to next InfoExtractor if not suitable | ||||
|                 if not ie.suitable(url): | ||||
|                     continue | ||||
|  | ||||
|                 # Warn if the _WORKING attribute is False | ||||
|                 if not ie.working(): | ||||
|                     self.to_stderr(u'WARNING: the program functionality for this site has been marked as broken, ' | ||||
|                                    u'and will probably not work. If you want to go on, use the -i option.') | ||||
|  | ||||
|                 # Suitable InfoExtractor found | ||||
|                 suitable_found = True | ||||
|  | ||||
|                 # Extract information from URL and process it | ||||
|                 try: | ||||
|                     videos = ie.extract(url) | ||||
|                 except ExtractorError as de: # An error we somewhat expected | ||||
|                     self.trouble(u'ERROR: ' + compat_str(de), de.format_traceback()) | ||||
|                     break | ||||
|                 except Exception as e: | ||||
|                     if self.params.get('ignoreerrors', False): | ||||
|                         self.trouble(u'ERROR: ' + compat_str(e), tb=compat_str(traceback.format_exc())) | ||||
|                         break | ||||
|                     else: | ||||
|                         raise | ||||
|  | ||||
|                 if len(videos or []) > 1 and self.fixed_template(): | ||||
|                     raise SameFileError(self.params['outtmpl']) | ||||
|  | ||||
|                 for video in videos or []: | ||||
|                     video['extractor'] = ie.IE_NAME | ||||
|                     try: | ||||
|                         self.increment_downloads() | ||||
|                         self.process_info(video) | ||||
|                     except UnavailableVideoError: | ||||
|                         self.trouble(u'\nERROR: unable to download video') | ||||
|  | ||||
|                 # Suitable InfoExtractor had been found; go to next URL | ||||
|                 break | ||||
|  | ||||
|             if not suitable_found: | ||||
|                 self.trouble(u'ERROR: no suitable InfoExtractor: %s' % url) | ||||
|  | ||||
|         return self._download_retcode | ||||
|  | ||||
|     def post_process(self, filename, ie_info): | ||||
|         """Run all the postprocessors on the given file.""" | ||||
|         info = dict(ie_info) | ||||
|         info['filepath'] = filename | ||||
|         keep_video = None | ||||
|         for pp in self._pps: | ||||
|             try: | ||||
|                 keep_video_wish,new_info = pp.run(info) | ||||
|                 if keep_video_wish is not None: | ||||
|                     if keep_video_wish: | ||||
|                         keep_video = keep_video_wish | ||||
|                     elif keep_video is None: | ||||
|                         # No clear decision yet, let IE decide | ||||
|                         keep_video = keep_video_wish | ||||
|             except PostProcessingError as e: | ||||
|                 self.to_stderr(u'ERROR: ' + e.msg) | ||||
|         if keep_video is False and not self.params.get('keepvideo', False): | ||||
|             try: | ||||
|                 self.to_stderr(u'Deleting original file %s (pass -k to keep)' % filename) | ||||
|                 os.remove(encodeFilename(filename)) | ||||
|             except (IOError, OSError): | ||||
|                 self.to_stderr(u'WARNING: Unable to remove downloaded video file') | ||||
|  | ||||
|     def _download_with_rtmpdump(self, filename, url, player_url, page_url): | ||||
|         self.report_destination(filename) | ||||
|         tmpfilename = self.temp_name(filename) | ||||
|  | ||||
|         # Check for rtmpdump first | ||||
|         try: | ||||
|             subprocess.call(['rtmpdump', '-h'], stdout=open(os.path.devnull, 'w'), stderr=subprocess.STDOUT) | ||||
|         except (OSError, IOError): | ||||
|             self.trouble(u'ERROR: RTMP download detected but "rtmpdump" could not be run') | ||||
|             return False | ||||
|  | ||||
|         # Download using rtmpdump. rtmpdump returns exit code 2 when | ||||
|         # the connection was interrupted and resuming appears to be | ||||
|         # possible. This is part of rtmpdump's normal usage, AFAIK. | ||||
|         basic_args = ['rtmpdump', '-q', '-r', url, '-o', tmpfilename] | ||||
|         if player_url is not None: | ||||
|             basic_args += ['-W', player_url] | ||||
|         if page_url is not None: | ||||
|             basic_args += ['--pageUrl', page_url] | ||||
|         args = basic_args + [[], ['-e', '-k', '1']][self.params.get('continuedl', False)] | ||||
|         if self.params.get('verbose', False): | ||||
|             try: | ||||
|                 import pipes | ||||
|                 shell_quote = lambda args: ' '.join(map(pipes.quote, args)) | ||||
|             except ImportError: | ||||
|                 shell_quote = repr | ||||
|             self.to_screen(u'[debug] rtmpdump command line: ' + shell_quote(args)) | ||||
|         retval = subprocess.call(args) | ||||
|         while retval == 2 or retval == 1: | ||||
|             prevsize = os.path.getsize(encodeFilename(tmpfilename)) | ||||
|             self.to_screen(u'\r[rtmpdump] %s bytes' % prevsize, skip_eol=True) | ||||
|             time.sleep(5.0) # This seems to be needed | ||||
|             retval = subprocess.call(basic_args + ['-e'] + [[], ['-k', '1']][retval == 1]) | ||||
|             cursize = os.path.getsize(encodeFilename(tmpfilename)) | ||||
|             if prevsize == cursize and retval == 1: | ||||
|                 break | ||||
|             # Some rtmp streams seem to abort after ~ 99.8%. Don't complain for those | ||||
|             if prevsize == cursize and retval == 2 and cursize > 1024: | ||||
|                 self.to_screen(u'\r[rtmpdump] Could not download the whole video. This can happen for some advertisements.') | ||||
|                 retval = 0 | ||||
|                 break | ||||
|         if retval == 0: | ||||
|             fsize = os.path.getsize(encodeFilename(tmpfilename)) | ||||
|             self.to_screen(u'\r[rtmpdump] %s bytes' % fsize) | ||||
|             self.try_rename(tmpfilename, filename) | ||||
|             self._hook_progress({ | ||||
|                 'downloaded_bytes': fsize, | ||||
|                 'total_bytes': fsize, | ||||
|                 'filename': filename, | ||||
|                 'status': 'finished', | ||||
|             }) | ||||
|             return True | ||||
|         else: | ||||
|             self.trouble(u'\nERROR: rtmpdump exited with code %d' % retval) | ||||
|             return False | ||||
|  | ||||
|     def _do_download(self, filename, info_dict): | ||||
|         url = info_dict['url'] | ||||
|  | ||||
|         # Check file already present | ||||
|         if self.params.get('continuedl', False) and os.path.isfile(encodeFilename(filename)) and not self.params.get('nopart', False): | ||||
|             self.report_file_already_downloaded(filename) | ||||
|             self._hook_progress({ | ||||
|                 'filename': filename, | ||||
|                 'status': 'finished', | ||||
|             }) | ||||
|             return True | ||||
|  | ||||
|         # Attempt to download using rtmpdump | ||||
|         if url.startswith('rtmp'): | ||||
|             return self._download_with_rtmpdump(filename, url, | ||||
|                                                 info_dict.get('player_url', None), | ||||
|                                                 info_dict.get('page_url', None)) | ||||
|  | ||||
|         tmpfilename = self.temp_name(filename) | ||||
|         stream = None | ||||
|  | ||||
|         # Do not include the Accept-Encoding header | ||||
|         headers = {'Youtubedl-no-compression': 'True'} | ||||
|         if 'user_agent' in info_dict: | ||||
|             headers['Youtubedl-user-agent'] = info_dict['user_agent'] | ||||
|         basic_request = compat_urllib_request.Request(url, None, headers) | ||||
|         request = compat_urllib_request.Request(url, None, headers) | ||||
|  | ||||
|         if self.params.get('test', False): | ||||
|             request.add_header('Range','bytes=0-10240') | ||||
|  | ||||
|         # Establish possible resume length | ||||
|         if os.path.isfile(encodeFilename(tmpfilename)): | ||||
|             resume_len = os.path.getsize(encodeFilename(tmpfilename)) | ||||
|         else: | ||||
|             resume_len = 0 | ||||
|  | ||||
|         open_mode = 'wb' | ||||
|         if resume_len != 0: | ||||
|             if self.params.get('continuedl', False): | ||||
|                 self.report_resuming_byte(resume_len) | ||||
|                 request.add_header('Range','bytes=%d-' % resume_len) | ||||
|                 open_mode = 'ab' | ||||
|             else: | ||||
|                 resume_len = 0 | ||||
|  | ||||
|         count = 0 | ||||
|         retries = self.params.get('retries', 0) | ||||
|         while count <= retries: | ||||
|             # Establish connection | ||||
|             try: | ||||
|                 if count == 0 and 'urlhandle' in info_dict: | ||||
|                     data = info_dict['urlhandle'] | ||||
|                 else: | ||||
|                     data = compat_urllib_request.urlopen(request) | ||||
|                 break | ||||
|             except (compat_urllib_error.HTTPError, ) as err: | ||||
|                 if (err.code < 500 or err.code >= 600) and err.code != 416: | ||||
|                     # Unexpected HTTP error | ||||
|                     raise | ||||
|                 elif err.code == 416: | ||||
|                     # Unable to resume (requested range not satisfiable) | ||||
|                     try: | ||||
|                         # Open the connection again without the range header | ||||
|                         data = compat_urllib_request.urlopen(basic_request) | ||||
|                         content_length = data.info()['Content-Length'] | ||||
|                     except (compat_urllib_error.HTTPError, ) as err: | ||||
|                         if err.code < 500 or err.code >= 600: | ||||
|                             raise | ||||
|                     else: | ||||
|                         # Examine the reported length | ||||
|                         if (content_length is not None and | ||||
|                                 (resume_len - 100 < int(content_length) < resume_len + 100)): | ||||
|                             # The file had already been fully downloaded. | ||||
|                             # Explanation to the above condition: in issue #175 it was revealed that | ||||
|                             # YouTube sometimes adds or removes a few bytes from the end of the file, | ||||
|                             # changing the file size slightly and causing problems for some users. So | ||||
|                             # I decided to implement a suggested change and consider the file | ||||
|                             # completely downloaded if the file size differs less than 100 bytes from | ||||
|                             # the one in the hard drive. | ||||
|                             self.report_file_already_downloaded(filename) | ||||
|                             self.try_rename(tmpfilename, filename) | ||||
|                             self._hook_progress({ | ||||
|                                 'filename': filename, | ||||
|                                 'status': 'finished', | ||||
|                             }) | ||||
|                             return True | ||||
|                         else: | ||||
|                             # The length does not match, we start the download over | ||||
|                             self.report_unable_to_resume() | ||||
|                             open_mode = 'wb' | ||||
|                             break | ||||
|             # Retry | ||||
|             count += 1 | ||||
|             if count <= retries: | ||||
|                 self.report_retry(count, retries) | ||||
|  | ||||
|         if count > retries: | ||||
|             self.trouble(u'ERROR: giving up after %s retries' % retries) | ||||
|             return False | ||||
|  | ||||
|         data_len = data.info().get('Content-length', None) | ||||
|         if data_len is not None: | ||||
|             data_len = int(data_len) + resume_len | ||||
|             min_data_len = self.params.get("min_filesize", None) | ||||
|             max_data_len =  self.params.get("max_filesize", None) | ||||
|             if min_data_len is not None and data_len < min_data_len: | ||||
|                 self.to_screen(u'\r[download] File is smaller than min-filesize (%s bytes < %s bytes). Aborting.' % (data_len, min_data_len)) | ||||
|                 return False | ||||
|             if max_data_len is not None and data_len > max_data_len: | ||||
|                 self.to_screen(u'\r[download] File is larger than max-filesize (%s bytes > %s bytes). Aborting.' % (data_len, max_data_len)) | ||||
|                 return False | ||||
|  | ||||
|         data_len_str = self.format_bytes(data_len) | ||||
|         byte_counter = 0 + resume_len | ||||
|         block_size = self.params.get('buffersize', 1024) | ||||
|         start = time.time() | ||||
|         while True: | ||||
|             # Download and write | ||||
|             before = time.time() | ||||
|             data_block = data.read(block_size) | ||||
|             after = time.time() | ||||
|             if len(data_block) == 0: | ||||
|                 break | ||||
|             byte_counter += len(data_block) | ||||
|  | ||||
|             # Open file just in time | ||||
|             if stream is None: | ||||
|                 try: | ||||
|                     (stream, tmpfilename) = sanitize_open(tmpfilename, open_mode) | ||||
|                     assert stream is not None | ||||
|                     filename = self.undo_temp_name(tmpfilename) | ||||
|                     self.report_destination(filename) | ||||
|                 except (OSError, IOError) as err: | ||||
|                     self.trouble(u'ERROR: unable to open for writing: %s' % str(err)) | ||||
|                     return False | ||||
|             try: | ||||
|                 stream.write(data_block) | ||||
|             except (IOError, OSError) as err: | ||||
|                 self.trouble(u'\nERROR: unable to write data: %s' % str(err)) | ||||
|                 return False | ||||
|             if not self.params.get('noresizebuffer', False): | ||||
|                 block_size = self.best_block_size(after - before, len(data_block)) | ||||
|  | ||||
|             # Progress message | ||||
|             speed_str = self.calc_speed(start, time.time(), byte_counter - resume_len) | ||||
|             if data_len is None: | ||||
|                 self.report_progress('Unknown %', data_len_str, speed_str, 'Unknown ETA') | ||||
|             else: | ||||
|                 percent_str = self.calc_percent(byte_counter, data_len) | ||||
|                 eta_str = self.calc_eta(start, time.time(), data_len - resume_len, byte_counter - resume_len) | ||||
|                 self.report_progress(percent_str, data_len_str, speed_str, eta_str) | ||||
|  | ||||
|             self._hook_progress({ | ||||
|                 'downloaded_bytes': byte_counter, | ||||
|                 'total_bytes': data_len, | ||||
|                 'tmpfilename': tmpfilename, | ||||
|                 'filename': filename, | ||||
|                 'status': 'downloading', | ||||
|             }) | ||||
|  | ||||
|             # Apply rate limit | ||||
|             self.slow_down(start, byte_counter - resume_len) | ||||
|  | ||||
|         if stream is None: | ||||
|             self.trouble(u'\nERROR: Did not get any data blocks') | ||||
|             return False | ||||
|         stream.close() | ||||
|         self.report_finish() | ||||
|         if data_len is not None and byte_counter != data_len: | ||||
|             raise ContentTooShortError(byte_counter, int(data_len)) | ||||
|         self.try_rename(tmpfilename, filename) | ||||
|  | ||||
|         # Update file modification time | ||||
|         if self.params.get('updatetime', True): | ||||
|             info_dict['filetime'] = self.try_utime(filename, data.info().get('last-modified', None)) | ||||
|  | ||||
|         self._hook_progress({ | ||||
|             'downloaded_bytes': byte_counter, | ||||
|             'total_bytes': byte_counter, | ||||
|             'filename': filename, | ||||
|             'status': 'finished', | ||||
|         }) | ||||
|  | ||||
|         return True | ||||
|  | ||||
|     def _hook_progress(self, status): | ||||
|         for ph in self._progress_hooks: | ||||
|             ph(status) | ||||
|  | ||||
|     def add_progress_hook(self, ph): | ||||
|         """ ph gets called on download progress, with a dictionary with the entries | ||||
|         * filename: The final filename | ||||
|         * status: One of "downloading" and "finished" | ||||
|  | ||||
|         It can also have some of the following entries: | ||||
|  | ||||
|         * downloaded_bytes: Bytes on disk | ||||
|         * total_bytes: Total bytes, None if unknown | ||||
|         * tmpfilename: The filename we're currently writing to | ||||
|  | ||||
|         Hooks are guaranteed to be called at least once (with status "finished") | ||||
|         if the download is successful. | ||||
|         """ | ||||
|         self._progress_hooks.append(ph) | ||||
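|  | ||||
|  | ||||
| # Illustrative usage sketch (not part of the original module): shows how the | ||||
| # params dictionary, InfoExtractor registration and progress hooks described | ||||
| # above fit together. YoutubeIE stands in for any extractor from | ||||
| # InfoExtractors.py; the URL and the option values are assumptions made for | ||||
| # this example only. | ||||
| if __name__ == '__main__': | ||||
|     from youtube_dl.InfoExtractors import YoutubeIE | ||||
|  | ||||
|     def progress_hook(status): | ||||
|         # 'filename' and 'status' are always present in the hook dictionary | ||||
|         if status['status'] == 'finished': | ||||
|             print('Finished downloading %s' % status['filename']) | ||||
|  | ||||
|     fd = FileDownloader({ | ||||
|         'outtmpl': u'%(title)s-%(id)s.%(ext)s', | ||||
|         'continuedl': True, | ||||
|         'retries': 10, | ||||
|     }) | ||||
|     fd.add_info_extractor(YoutubeIE()) | ||||
|     fd.add_progress_hook(progress_hook) | ||||
|     fd.download([u'http://www.youtube.com/watch?v=BaW_jenozKc']) | ||||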
							
								
								
									
3952  youtube_dl/InfoExtractors.py (Executable file) [File diff suppressed because it is too large]
232   youtube_dl/PostProcessor.py (Normal file)
									
								
							| @@ -0,0 +1,232 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| from __future__ import absolute_import | ||||
|  | ||||
| import os | ||||
| import subprocess | ||||
| import sys | ||||
| import time | ||||
|  | ||||
| from .utils import * | ||||
|  | ||||
|  | ||||
| class PostProcessor(object): | ||||
|     """Post Processor class. | ||||
|  | ||||
|     PostProcessor objects can be added to downloaders with their | ||||
|     add_post_processor() method. When the downloader has finished a | ||||
|     successful download, it will take its internal chain of PostProcessors | ||||
|     and start calling the run() method on each one of them, first with | ||||
|     an initial argument and then with the returned value of the previous | ||||
|     PostProcessor. | ||||
|  | ||||
|     The chain will be stopped if one of them ever returns None or the end | ||||
|     of the chain is reached. | ||||
|  | ||||
|     PostProcessor objects follow a "mutual registration" process similar | ||||
|     to InfoExtractor objects. | ||||
|     """ | ||||
|  | ||||
|     _downloader = None | ||||
|  | ||||
|     def __init__(self, downloader=None): | ||||
|         self._downloader = downloader | ||||
|  | ||||
|     def set_downloader(self, downloader): | ||||
|         """Sets the downloader for this PP.""" | ||||
|         self._downloader = downloader | ||||
|  | ||||
|     def run(self, information): | ||||
|         """Run the PostProcessor. | ||||
|  | ||||
|         The "information" argument is a dictionary like the ones | ||||
|         composed by InfoExtractors. The only difference is that this | ||||
|         one has an extra field called "filepath" that points to the | ||||
|         downloaded file. | ||||
|  | ||||
|         This method returns a tuple, the first element of which describes | ||||
|         whether the original file should be kept (i.e. not deleted - None for | ||||
|         no preference), and the second of which is the updated information. | ||||
|  | ||||
|         In addition, this method may raise a PostProcessingError | ||||
|         exception if post processing fails. | ||||
|         """ | ||||
|         return None, information # by default, keep file and do nothing | ||||
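|  | ||||
| # Minimal sketch of a custom post-processor (added for illustration, not part | ||||
| # of the original file). It follows the run() contract described above: return | ||||
| # a (keep_file_preference, info) tuple, where None means "no preference". The | ||||
| # class name and the printed message are assumptions for the example. | ||||
| class ExamplePrintFilepathPP(PostProcessor): | ||||
|     def run(self, information): | ||||
|         # 'filepath' is added by the downloader and points to the downloaded file | ||||
|         print('[example-pp] post-processing %s' % information['filepath']) | ||||
|         return None, information | ||||
| # It would be registered with FileDownloader.add_post_processor(ExamplePrintFilepathPP()). | ||||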
|  | ||||
| class FFmpegPostProcessorError(PostProcessingError): | ||||
|     pass | ||||
|  | ||||
| class AudioConversionError(PostProcessingError): | ||||
|     pass | ||||
|  | ||||
| class FFmpegPostProcessor(PostProcessor): | ||||
|     def __init__(self,downloader=None): | ||||
|         PostProcessor.__init__(self, downloader) | ||||
|         self._exes = self.detect_executables() | ||||
|  | ||||
|     @staticmethod | ||||
|     def detect_executables(): | ||||
|         def executable(exe): | ||||
|             try: | ||||
|                 subprocess.Popen([exe, '-version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() | ||||
|             except OSError: | ||||
|                 return False | ||||
|             return exe | ||||
|         programs = ['avprobe', 'avconv', 'ffmpeg', 'ffprobe'] | ||||
|         return dict((program, executable(program)) for program in programs) | ||||
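|  | ||||
|     # Note (added for clarity, not in the original source): the resulting dict | ||||
|     # maps each program name to itself when it can be executed and to False | ||||
|     # otherwise, e.g. {'avprobe': False, 'avconv': False, 'ffmpeg': 'ffmpeg', | ||||
|     # 'ffprobe': 'ffprobe'} on a system that only has ffmpeg installed. | ||||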
|  | ||||
|     def run_ffmpeg(self, path, out_path, opts): | ||||
|         if not self._exes['ffmpeg'] and not self._exes['avconv']: | ||||
|             raise FFmpegPostProcessorError(u'ffmpeg or avconv not found. Please install one.') | ||||
|         cmd = ([self._exes['avconv'] or self._exes['ffmpeg'], '-y', '-i', encodeFilename(path)] | ||||
|                + opts + | ||||
|                [encodeFilename(self._ffmpeg_filename_argument(out_path))]) | ||||
|         p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) | ||||
|         stdout,stderr = p.communicate() | ||||
|         if p.returncode != 0: | ||||
|             msg = stderr.strip().split('\n')[-1] | ||||
|             raise FFmpegPostProcessorError(msg.decode('utf-8', 'replace')) | ||||
|  | ||||
|     def _ffmpeg_filename_argument(self, fn): | ||||
|         # ffmpeg broke --, see https://ffmpeg.org/trac/ffmpeg/ticket/2127 for details | ||||
|         if fn.startswith(u'-'): | ||||
|             return u'./' + fn | ||||
|         return fn | ||||
|  | ||||
| class FFmpegExtractAudioPP(FFmpegPostProcessor): | ||||
|     def __init__(self, downloader=None, preferredcodec=None, preferredquality=None, nopostoverwrites=False): | ||||
|         FFmpegPostProcessor.__init__(self, downloader) | ||||
|         if preferredcodec is None: | ||||
|             preferredcodec = 'best' | ||||
|         self._preferredcodec = preferredcodec | ||||
|         self._preferredquality = preferredquality | ||||
|         self._nopostoverwrites = nopostoverwrites | ||||
|  | ||||
|     def get_audio_codec(self, path): | ||||
|         if not self._exes['ffprobe'] and not self._exes['avprobe']: return None | ||||
|         try: | ||||
|             cmd = [self._exes['avprobe'] or self._exes['ffprobe'], '-show_streams', encodeFilename(self._ffmpeg_filename_argument(path))] | ||||
|             handle = subprocess.Popen(cmd, stderr=compat_subprocess_get_DEVNULL(), stdout=subprocess.PIPE) | ||||
|             output = handle.communicate()[0] | ||||
|             if handle.wait() != 0: | ||||
|                 return None | ||||
|         except (IOError, OSError): | ||||
|             return None | ||||
|         audio_codec = None | ||||
|         for line in output.decode('ascii', 'ignore').split('\n'): | ||||
|             if line.startswith('codec_name='): | ||||
|                 audio_codec = line.split('=')[1].strip() | ||||
|             elif line.strip() == 'codec_type=audio' and audio_codec is not None: | ||||
|                 return audio_codec | ||||
|         return None | ||||
|  | ||||
|     def run_ffmpeg(self, path, out_path, codec, more_opts): | ||||
|         if not self._exes['ffmpeg'] and not self._exes['avconv']: | ||||
|             raise AudioConversionError('ffmpeg or avconv not found. Please install one.') | ||||
|         if codec is None: | ||||
|             acodec_opts = [] | ||||
|         else: | ||||
|             acodec_opts = ['-acodec', codec] | ||||
|         opts = ['-vn'] + acodec_opts + more_opts | ||||
|         try: | ||||
|             FFmpegPostProcessor.run_ffmpeg(self, path, out_path, opts) | ||||
|         except FFmpegPostProcessorError as err: | ||||
|             raise AudioConversionError(err.message) | ||||
|  | ||||
|     def run(self, information): | ||||
|         path = information['filepath'] | ||||
|  | ||||
|         filecodec = self.get_audio_codec(path) | ||||
|         if filecodec is None: | ||||
|             raise PostProcessingError(u'WARNING: unable to obtain file audio codec with ffprobe') | ||||
|  | ||||
|         more_opts = [] | ||||
|         if self._preferredcodec == 'best' or self._preferredcodec == filecodec or (self._preferredcodec == 'm4a' and filecodec == 'aac'): | ||||
|             if filecodec == 'aac' and self._preferredcodec in ['m4a', 'best']: | ||||
|                 # Lossless, but in another container | ||||
|                 acodec = 'copy' | ||||
|                 extension = 'm4a' | ||||
|                 more_opts = [self._exes['avconv'] and '-bsf:a' or '-absf', 'aac_adtstoasc'] | ||||
|             elif filecodec in ['aac', 'mp3', 'vorbis', 'opus']: | ||||
|                 # Lossless if possible | ||||
|                 acodec = 'copy' | ||||
|                 extension = filecodec | ||||
|                 if filecodec == 'aac': | ||||
|                     more_opts = ['-f', 'adts'] | ||||
|                 if filecodec == 'vorbis': | ||||
|                     extension = 'ogg' | ||||
|             else: | ||||
|                 # MP3 otherwise. | ||||
|                 acodec = 'libmp3lame' | ||||
|                 extension = 'mp3' | ||||
|                 more_opts = [] | ||||
|                 if self._preferredquality is not None: | ||||
|                     if int(self._preferredquality) < 10: | ||||
|                         more_opts += [self._exes['avconv'] and '-q:a' or '-aq', self._preferredquality] | ||||
|                     else: | ||||
|                         more_opts += [self._exes['avconv'] and '-b:a' or '-ab', self._preferredquality + 'k'] | ||||
|         else: | ||||
|             # We convert the audio (lossy) | ||||
|             acodec = {'mp3': 'libmp3lame', 'aac': 'aac', 'm4a': 'aac', 'opus': 'opus', 'vorbis': 'libvorbis', 'wav': None}[self._preferredcodec] | ||||
|             extension = self._preferredcodec | ||||
|             more_opts = [] | ||||
|             if self._preferredquality is not None: | ||||
|                 if int(self._preferredquality) < 10: | ||||
|                     more_opts += [self._exes['avconv'] and '-q:a' or '-aq', self._preferredquality] | ||||
|                 else: | ||||
|                     more_opts += [self._exes['avconv'] and '-b:a' or '-ab', self._preferredquality + 'k'] | ||||
|             if self._preferredcodec == 'aac': | ||||
|                 more_opts += ['-f', 'adts'] | ||||
|             if self._preferredcodec == 'm4a': | ||||
|                 more_opts += [self._exes['avconv'] and '-bsf:a' or '-absf', 'aac_adtstoasc'] | ||||
|             if self._preferredcodec == 'vorbis': | ||||
|                 extension = 'ogg' | ||||
|             if self._preferredcodec == 'wav': | ||||
|                 extension = 'wav' | ||||
|                 more_opts += ['-f', 'wav'] | ||||
|  | ||||
|         prefix, sep, ext = path.rpartition(u'.') # not os.path.splitext, since the latter does not work on unicode in all setups | ||||
|         new_path = prefix + sep + extension | ||||
|         try: | ||||
|             if self._nopostoverwrites and os.path.exists(encodeFilename(new_path)): | ||||
|                 self._downloader.to_screen(u'[youtube] Post-process file %s exists, skipping' % new_path) | ||||
|             else: | ||||
|                 self._downloader.to_screen(u'[' + (self._exes['avconv'] and 'avconv' or 'ffmpeg') + '] Destination: ' + new_path) | ||||
|                 self.run_ffmpeg(path, new_path, acodec, more_opts) | ||||
|         except: | ||||
|             etype,e,tb = sys.exc_info() | ||||
|             if isinstance(e, AudioConversionError): | ||||
|                 msg = u'audio conversion failed: ' + e.message | ||||
|             else: | ||||
|                 msg = u'error running ' + (self._exes['avconv'] and 'avconv' or 'ffmpeg') | ||||
|             raise PostProcessingError(msg) | ||||
|  | ||||
|         # Try to update the date time for extracted audio file. | ||||
|         if information.get('filetime') is not None: | ||||
|             try: | ||||
|                 os.utime(encodeFilename(new_path), (time.time(), information['filetime'])) | ||||
|             except: | ||||
|                 self._downloader.to_stderr(u'WARNING: Cannot update utime of audio file') | ||||
|  | ||||
|         information['filepath'] = new_path | ||||
|         return False,information | ||||
|  | ||||
| class FFmpegVideoConvertor(FFmpegPostProcessor): | ||||
|     def __init__(self, downloader=None,preferedformat=None): | ||||
|         super(FFmpegVideoConvertor, self).__init__(downloader) | ||||
|         self._preferedformat=preferedformat | ||||
|  | ||||
|     def run(self, information): | ||||
|         path = information['filepath'] | ||||
|         prefix, sep, ext = path.rpartition(u'.') | ||||
|         outpath = prefix + sep + self._preferedformat | ||||
|         if information['ext'] == self._preferedformat: | ||||
|             self._downloader.to_screen(u'[ffmpeg] Not converting video file %s - already is in target format %s' % (path, self._preferedformat)) | ||||
|             return True,information | ||||
|         self._downloader.to_screen(u'[ffmpeg] Converting video from %s to %s, Destination: %s' % (information['ext'], self._preferedformat, outpath)) | ||||
|         self.run_ffmpeg(path, outpath, []) | ||||
|         information['filepath'] = outpath | ||||
|         information['format'] = self._preferedformat | ||||
|         information['ext'] = self._preferedformat | ||||
|         return False,information | ||||
							
								
								
									
4898  youtube_dl/__init__.py (Executable file → Normal file) [File diff suppressed because it is too large]
17    youtube_dl/__main__.py (Executable file)
									
								
							| @@ -0,0 +1,17 @@ | ||||
| #!/usr/bin/env python | ||||
|  | ||||
| # Execute with | ||||
| # $ python youtube_dl/__main__.py (2.6+) | ||||
| # $ python -m youtube_dl          (2.7+) | ||||
|  | ||||
| import sys | ||||
|  | ||||
| if __package__ is None and not hasattr(sys, "frozen"): | ||||
|     # direct call of __main__.py | ||||
|     import os.path | ||||
|     sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) | ||||
|  | ||||
| import youtube_dl | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     youtube_dl.main() | ||||
							
								
								
									
160   youtube_dl/update.py (Normal file)
									
								
							| @@ -0,0 +1,160 @@ | ||||
| import json | ||||
| import traceback | ||||
| import hashlib | ||||
| from zipimport import zipimporter | ||||
|  | ||||
| from .utils import * | ||||
| from .version import __version__ | ||||
|  | ||||
| def rsa_verify(message, signature, key): | ||||
|     from struct import pack | ||||
|     from hashlib import sha256 | ||||
|     from sys import version_info | ||||
|     def b(x): | ||||
|         if version_info[0] == 2: return x | ||||
|         else: return x.encode('latin1') | ||||
|     assert(type(message) == type(b(''))) | ||||
|     block_size = 0 | ||||
|     n = key[0] | ||||
|     while n: | ||||
|         block_size += 1 | ||||
|         n >>= 8 | ||||
|     signature = pow(int(signature, 16), key[1], key[0]) | ||||
|     raw_bytes = [] | ||||
|     while signature: | ||||
|         raw_bytes.insert(0, pack("B", signature & 0xFF)) | ||||
|         signature >>= 8 | ||||
|     signature = (block_size - len(raw_bytes)) * b('\x00') + b('').join(raw_bytes) | ||||
|     if signature[0:2] != b('\x00\x01'): return False | ||||
|     signature = signature[2:] | ||||
|     if not b('\x00') in signature: return False | ||||
|     signature = signature[signature.index(b('\x00'))+1:] | ||||
|     if not signature.startswith(b('\x30\x31\x30\x0D\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x01\x05\x00\x04\x20')): return False | ||||
|     signature = signature[19:] | ||||
|     if signature != sha256(message).digest(): return False | ||||
|     return True | ||||
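|  | ||||
| # Note (added for clarity, not in the original source): this is textbook | ||||
| # PKCS#1 v1.5 signature verification: strip the 00 01 FF.. 00 padding, check | ||||
| # the fixed ASN.1 DigestInfo prefix for SHA-256, then compare the remainder | ||||
| # against sha256(message).digest(). update_self() below uses it to verify the | ||||
| # signed versions.json before applying an update. | ||||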
|  | ||||
| def update_self(to_screen, verbose, filename): | ||||
|     """Update the program file with the latest version from the repository""" | ||||
|  | ||||
|     UPDATE_URL = "http://rg3.github.com/youtube-dl/update/" | ||||
|     VERSION_URL = UPDATE_URL + 'LATEST_VERSION' | ||||
|     JSON_URL = UPDATE_URL + 'versions.json' | ||||
|     UPDATES_RSA_KEY = (0x9d60ee4d8f805312fdb15a62f87b95bd66177b91df176765d13514a0f1754bcd2057295c5b6f1d35daa6742c3ffc9a82d3e118861c207995a8031e151d863c9927e304576bc80692bc8e094896fcf11b66f3e29e04e3a71e9a11558558acea1840aec37fc396fb6b65dc81a1c4144e03bd1c011de62e3f1357b327d08426fe93, 65537) | ||||
|  | ||||
|  | ||||
|     if not isinstance(globals().get('__loader__'), zipimporter) and not hasattr(sys, "frozen"): | ||||
|         to_screen(u'It looks like you installed youtube-dl with pip, setup.py or a tarball. Please use that to update.') | ||||
|         return | ||||
|  | ||||
|     # Check if there is a new version | ||||
|     try: | ||||
|         newversion = compat_urllib_request.urlopen(VERSION_URL).read().decode('utf-8').strip() | ||||
|     except: | ||||
|         if verbose: to_screen(compat_str(traceback.format_exc())) | ||||
|         to_screen(u'ERROR: can\'t find the current version. Please try again later.') | ||||
|         return | ||||
|     if newversion == __version__: | ||||
|         to_screen(u'youtube-dl is up-to-date (' + __version__ + ')') | ||||
|         return | ||||
|  | ||||
|     # Download and check versions info | ||||
|     try: | ||||
|         versions_info = compat_urllib_request.urlopen(JSON_URL).read().decode('utf-8') | ||||
|         versions_info = json.loads(versions_info) | ||||
|     except: | ||||
|         if verbose: to_screen(compat_str(traceback.format_exc())) | ||||
|         to_screen(u'ERROR: can\'t obtain versions info. Please try again later.') | ||||
|         return | ||||
|     if not 'signature' in versions_info: | ||||
|         to_screen(u'ERROR: the versions file is not signed or corrupted. Aborting.') | ||||
|         return | ||||
|     signature = versions_info['signature'] | ||||
|     del versions_info['signature'] | ||||
|     if not rsa_verify(json.dumps(versions_info, sort_keys=True).encode('utf-8'), signature, UPDATES_RSA_KEY): | ||||
|         to_screen(u'ERROR: the versions file signature is invalid. Aborting.') | ||||
|         return | ||||
|  | ||||
|     to_screen(u'Updating to version ' + versions_info['latest'] + '...') | ||||
|     version = versions_info['versions'][versions_info['latest']] | ||||
|     if version.get('notes'): | ||||
|         to_screen(u'PLEASE NOTE:') | ||||
|         for note in version['notes']: | ||||
|             to_screen(note) | ||||
|  | ||||
|     if not os.access(filename, os.W_OK): | ||||
|         to_screen(u'ERROR: no write permissions on %s' % filename) | ||||
|         return | ||||
|  | ||||
|     # Py2EXE | ||||
|     if hasattr(sys, "frozen"): | ||||
|         exe = os.path.abspath(filename) | ||||
|         directory = os.path.dirname(exe) | ||||
|         if not os.access(directory, os.W_OK): | ||||
|             to_screen(u'ERROR: no write permissions on %s' % directory) | ||||
|             return | ||||
|  | ||||
|         try: | ||||
|             urlh = compat_urllib_request.urlopen(version['exe'][0]) | ||||
|             newcontent = urlh.read() | ||||
|             urlh.close() | ||||
|         except (IOError, OSError) as err: | ||||
|             if verbose: to_screen(compat_str(traceback.format_exc())) | ||||
|             to_screen(u'ERROR: unable to download latest version') | ||||
|             return | ||||
|  | ||||
|         newcontent_hash = hashlib.sha256(newcontent).hexdigest() | ||||
|         if newcontent_hash != version['exe'][1]: | ||||
|             to_screen(u'ERROR: the downloaded file hash does not match. Aborting.') | ||||
|             return | ||||
|  | ||||
|         try: | ||||
|             with open(exe + '.new', 'wb') as outf: | ||||
|                 outf.write(newcontent) | ||||
|         except (IOError, OSError) as err: | ||||
|             if verbose: to_screen(compat_str(traceback.format_exc())) | ||||
|             to_screen(u'ERROR: unable to write the new version') | ||||
|             return | ||||
|  | ||||
|         try: | ||||
|             bat = os.path.join(directory, 'youtube-dl-updater.bat') | ||||
|             b = open(bat, 'w') | ||||
|             b.write(""" | ||||
| echo Updating youtube-dl... | ||||
| ping 127.0.0.1 -n 5 -w 1000 > NUL | ||||
| move /Y "%s.new" "%s" | ||||
| del "%s" | ||||
|             \n""" %(exe, exe, bat)) | ||||
|             b.close() | ||||
|  | ||||
|             os.startfile(bat) | ||||
|         except (IOError, OSError) as err: | ||||
|             if verbose: to_screen(compat_str(traceback.format_exc())) | ||||
|             to_screen(u'ERROR: unable to overwrite current version') | ||||
|             return | ||||
|  | ||||
|     # Zip unix package | ||||
|     elif isinstance(globals().get('__loader__'), zipimporter): | ||||
|         try: | ||||
|             urlh = compat_urllib_request.urlopen(version['bin'][0]) | ||||
|             newcontent = urlh.read() | ||||
|             urlh.close() | ||||
|         except (IOError, OSError) as err: | ||||
|             if verbose: to_screen(compat_str(traceback.format_exc())) | ||||
|             to_screen(u'ERROR: unable to download latest version') | ||||
|             return | ||||
|  | ||||
|         newcontent_hash = hashlib.sha256(newcontent).hexdigest() | ||||
|         if newcontent_hash != version['bin'][1]: | ||||
|             to_screen(u'ERROR: the downloaded file hash does not match. Aborting.') | ||||
|             return | ||||
|  | ||||
|         try: | ||||
|             with open(filename, 'wb') as outf: | ||||
|                 outf.write(newcontent) | ||||
|         except (IOError, OSError) as err: | ||||
|             if verbose: to_screen(compat_str(traceback.format_exc())) | ||||
|             to_screen(u'ERROR: unable to overwrite current version') | ||||
|             return | ||||
|  | ||||
|     to_screen(u'Updated youtube-dl. Restart youtube-dl to use the new version.') | ||||
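A rough usage sketch; the callback wiring below is an assumption, not the actual call site:

    import os
    import sys

    def to_screen(msg):
        print(msg)   # stand-in for the real output helper

    # filename should be the running script/zip/exe so it can be replaced in place
    update_self(to_screen, verbose=True, filename=os.path.abspath(sys.argv[0]))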
558  youtube_dl/utils.py  Normal file
| @@ -0,0 +1,558 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| import gzip | ||||
| import io | ||||
| import json | ||||
| import locale | ||||
| import os | ||||
| import re | ||||
| import sys | ||||
| import traceback | ||||
| import zlib | ||||
| import email.utils | ||||
| import json | ||||
|  | ||||
| try: | ||||
|     import urllib.request as compat_urllib_request | ||||
| except ImportError: # Python 2 | ||||
|     import urllib2 as compat_urllib_request | ||||
|  | ||||
| try: | ||||
|     import urllib.error as compat_urllib_error | ||||
| except ImportError: # Python 2 | ||||
|     import urllib2 as compat_urllib_error | ||||
|  | ||||
| try: | ||||
|     import urllib.parse as compat_urllib_parse | ||||
| except ImportError: # Python 2 | ||||
|     import urllib as compat_urllib_parse | ||||
|  | ||||
| try: | ||||
|     from urllib.parse import urlparse as compat_urllib_parse_urlparse | ||||
| except ImportError: # Python 2 | ||||
|     from urlparse import urlparse as compat_urllib_parse_urlparse | ||||
|  | ||||
| try: | ||||
|     import http.cookiejar as compat_cookiejar | ||||
| except ImportError: # Python 2 | ||||
|     import cookielib as compat_cookiejar | ||||
|  | ||||
| try: | ||||
|     import html.entities as compat_html_entities | ||||
| except ImportError: # Python 2 | ||||
|     import htmlentitydefs as compat_html_entities | ||||
|  | ||||
| try: | ||||
|     import html.parser as compat_html_parser | ||||
| except ImportError: # Python 2 | ||||
|     import HTMLParser as compat_html_parser | ||||
|  | ||||
| try: | ||||
|     import http.client as compat_http_client | ||||
| except ImportError: # Python 2 | ||||
|     import httplib as compat_http_client | ||||
|  | ||||
| try: | ||||
|     from subprocess import DEVNULL | ||||
|     compat_subprocess_get_DEVNULL = lambda: DEVNULL | ||||
| except ImportError: | ||||
|     compat_subprocess_get_DEVNULL = lambda: open(os.path.devnull, 'w') | ||||
|  | ||||
| try: | ||||
|     from urllib.parse import parse_qs as compat_parse_qs | ||||
| except ImportError: # Python 2 | ||||
|     # HACK: The following is the correct parse_qs implementation from cpython 3's stdlib. | ||||
|     # Python 2's version is apparently totally broken | ||||
|     def _unquote(string, encoding='utf-8', errors='replace'): | ||||
|         if string == '': | ||||
|             return string | ||||
|         res = string.split('%') | ||||
|         if len(res) == 1: | ||||
|             return string | ||||
|         if encoding is None: | ||||
|             encoding = 'utf-8' | ||||
|         if errors is None: | ||||
|             errors = 'replace' | ||||
|         # pct_sequence: contiguous sequence of percent-encoded bytes, decoded | ||||
|         pct_sequence = b'' | ||||
|         string = res[0] | ||||
|         for item in res[1:]: | ||||
|             try: | ||||
|                 if not item: | ||||
|                     raise ValueError | ||||
|                 pct_sequence += item[:2].decode('hex') | ||||
|                 rest = item[2:] | ||||
|                 if not rest: | ||||
|                     # This segment was just a single percent-encoded character. | ||||
|                     # May be part of a sequence of code units, so delay decoding. | ||||
|                     # (Stored in pct_sequence). | ||||
|                     continue | ||||
|             except ValueError: | ||||
|                 rest = '%' + item | ||||
|             # Encountered non-percent-encoded characters. Flush the current | ||||
|             # pct_sequence. | ||||
|             string += pct_sequence.decode(encoding, errors) + rest | ||||
|             pct_sequence = b'' | ||||
|         if pct_sequence: | ||||
|             # Flush the final pct_sequence | ||||
|             string += pct_sequence.decode(encoding, errors) | ||||
|         return string | ||||
|  | ||||
|     def _parse_qsl(qs, keep_blank_values=False, strict_parsing=False, | ||||
|                 encoding='utf-8', errors='replace'): | ||||
|         qs, _coerce_result = qs, unicode | ||||
|         pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')] | ||||
|         r = [] | ||||
|         for name_value in pairs: | ||||
|             if not name_value and not strict_parsing: | ||||
|                 continue | ||||
|             nv = name_value.split('=', 1) | ||||
|             if len(nv) != 2: | ||||
|                 if strict_parsing: | ||||
|                     raise ValueError("bad query field: %r" % (name_value,)) | ||||
|                 # Handle case of a control-name with no equal sign | ||||
|                 if keep_blank_values: | ||||
|                     nv.append('') | ||||
|                 else: | ||||
|                     continue | ||||
|             if len(nv[1]) or keep_blank_values: | ||||
|                 name = nv[0].replace('+', ' ') | ||||
|                 name = _unquote(name, encoding=encoding, errors=errors) | ||||
|                 name = _coerce_result(name) | ||||
|                 value = nv[1].replace('+', ' ') | ||||
|                 value = _unquote(value, encoding=encoding, errors=errors) | ||||
|                 value = _coerce_result(value) | ||||
|                 r.append((name, value)) | ||||
|         return r | ||||
|  | ||||
|     def compat_parse_qs(qs, keep_blank_values=False, strict_parsing=False, | ||||
|                 encoding='utf-8', errors='replace'): | ||||
|         parsed_result = {} | ||||
|         pairs = _parse_qsl(qs, keep_blank_values, strict_parsing, | ||||
|                         encoding=encoding, errors=errors) | ||||
|         for name, value in pairs: | ||||
|             if name in parsed_result: | ||||
|                 parsed_result[name].append(value) | ||||
|             else: | ||||
|                 parsed_result[name] = [value] | ||||
|         return parsed_result | ||||
|  | ||||
| try: | ||||
|     compat_str = unicode # Python 2 | ||||
| except NameError: | ||||
|     compat_str = str | ||||
|  | ||||
| try: | ||||
|     compat_chr = unichr # Python 2 | ||||
| except NameError: | ||||
|     compat_chr = chr | ||||
|  | ||||
| std_headers = { | ||||
|     'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:10.0) Gecko/20100101 Firefox/10.0', | ||||
|     'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7', | ||||
|     'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', | ||||
|     'Accept-Encoding': 'gzip, deflate', | ||||
|     'Accept-Language': 'en-us,en;q=0.5', | ||||
| } | ||||
|  | ||||
| def preferredencoding(): | ||||
|     """Get preferred encoding. | ||||
|  | ||||
|     Returns the best encoding scheme for the system, based on | ||||
|     locale.getpreferredencoding() and some further tweaks. | ||||
|     """ | ||||
|     try: | ||||
|         pref = locale.getpreferredencoding() | ||||
|         u'TEST'.encode(pref) | ||||
|     except: | ||||
|         pref = 'UTF-8' | ||||
|  | ||||
|     return pref | ||||
|  | ||||
| if sys.version_info < (3,0): | ||||
|     def compat_print(s): | ||||
|         print(s.encode(preferredencoding(), 'xmlcharrefreplace')) | ||||
| else: | ||||
|     def compat_print(s): | ||||
|         assert type(s) == type(u'') | ||||
|         print(s) | ||||
|  | ||||
| # In Python 2.x, json.dump expects a bytestream. | ||||
| # In Python 3.x, it writes to a character stream | ||||
| if sys.version_info < (3,0): | ||||
|     def write_json_file(obj, fn): | ||||
|         with open(fn, 'wb') as f: | ||||
|             json.dump(obj, f) | ||||
| else: | ||||
|     def write_json_file(obj, fn): | ||||
|         with open(fn, 'w', encoding='utf-8') as f: | ||||
|             json.dump(obj, f) | ||||
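A small usage sketch (the file name is arbitrary):

    info = {u'id': u'abc123', u'title': u'Some title'}
    write_json_file(info, 'info.json')   # same call on Python 2 and 3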
|  | ||||
| def htmlentity_transform(matchobj): | ||||
|     """Transforms an HTML entity to a character. | ||||
|  | ||||
|     This function receives a match object and is intended to be used with | ||||
|     the re.sub() function. | ||||
|     """ | ||||
|     entity = matchobj.group(1) | ||||
|  | ||||
|     # Known non-numeric HTML entity | ||||
|     if entity in compat_html_entities.name2codepoint: | ||||
|         return compat_chr(compat_html_entities.name2codepoint[entity]) | ||||
|  | ||||
|     mobj = re.match(u'(?u)#(x?\\d+)', entity) | ||||
|     if mobj is not None: | ||||
|         numstr = mobj.group(1) | ||||
|         if numstr.startswith(u'x'): | ||||
|             base = 16 | ||||
|             numstr = u'0%s' % numstr | ||||
|         else: | ||||
|             base = 10 | ||||
|         return compat_chr(int(numstr, base)) | ||||
|  | ||||
|     # Unknown entity in name, return its literal representation | ||||
|     return (u'&%s;' % entity) | ||||
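It is only useful as a re.sub() callback, e.g. with the same pattern that unescapeHTML uses further down:

    print(re.sub(u'(?u)&(.+?);', htmlentity_transform, u'Tom &amp; Jerry &#169; &#x41;'))
    # -> Tom & Jerry © A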
|  | ||||
| compat_html_parser.locatestarttagend = re.compile(r"""<[a-zA-Z][-.a-zA-Z0-9:_]*(?:\s+(?:(?<=['"\s])[^\s/>][^\s/=>]*(?:\s*=+\s*(?:'[^']*'|"[^"]*"|(?!['"])[^>\s]*))?\s*)*)?\s*""", re.VERBOSE) # backport bugfix | ||||
| class AttrParser(compat_html_parser.HTMLParser): | ||||
|     """Modified HTMLParser that isolates a tag with the specified attribute""" | ||||
|     def __init__(self, attribute, value): | ||||
|         self.attribute = attribute | ||||
|         self.value = value | ||||
|         self.result = None | ||||
|         self.started = False | ||||
|         self.depth = {} | ||||
|         self.html = None | ||||
|         self.watch_startpos = False | ||||
|         self.error_count = 0 | ||||
|         compat_html_parser.HTMLParser.__init__(self) | ||||
|  | ||||
|     def error(self, message): | ||||
|         if self.error_count > 10 or self.started: | ||||
|             raise compat_html_parser.HTMLParseError(message, self.getpos()) | ||||
|         self.rawdata = '\n'.join(self.html.split('\n')[self.getpos()[0]:]) # skip one line | ||||
|         self.error_count += 1 | ||||
|         self.goahead(1) | ||||
|  | ||||
|     def loads(self, html): | ||||
|         self.html = html | ||||
|         self.feed(html) | ||||
|         self.close() | ||||
|  | ||||
|     def handle_starttag(self, tag, attrs): | ||||
|         attrs = dict(attrs) | ||||
|         if self.started: | ||||
|             self.find_startpos(None) | ||||
|         if self.attribute in attrs and attrs[self.attribute] == self.value: | ||||
|             self.result = [tag] | ||||
|             self.started = True | ||||
|             self.watch_startpos = True | ||||
|         if self.started: | ||||
|             if not tag in self.depth: self.depth[tag] = 0 | ||||
|             self.depth[tag] += 1 | ||||
|  | ||||
|     def handle_endtag(self, tag): | ||||
|         if self.started: | ||||
|             if tag in self.depth: self.depth[tag] -= 1 | ||||
|             if self.depth[self.result[0]] == 0: | ||||
|                 self.started = False | ||||
|                 self.result.append(self.getpos()) | ||||
|  | ||||
|     def find_startpos(self, x): | ||||
|         """Needed to put the start position of the result (self.result[1]) | ||||
|         after the opening tag with the requested id""" | ||||
|         if self.watch_startpos: | ||||
|             self.watch_startpos = False | ||||
|             self.result.append(self.getpos()) | ||||
|     handle_entityref = handle_charref = handle_data = handle_comment = \ | ||||
|     handle_decl = handle_pi = unknown_decl = find_startpos | ||||
|  | ||||
|     def get_result(self): | ||||
|         if self.result is None: | ||||
|             return None | ||||
|         if len(self.result) != 3: | ||||
|             return None | ||||
|         lines = self.html.split('\n') | ||||
|         lines = lines[self.result[1][0]-1:self.result[2][0]] | ||||
|         lines[0] = lines[0][self.result[1][1]:] | ||||
|         if len(lines) == 1: | ||||
|             lines[-1] = lines[-1][:self.result[2][1]-self.result[1][1]] | ||||
|         lines[-1] = lines[-1][:self.result[2][1]] | ||||
|         return '\n'.join(lines).strip() | ||||
| # Hack for https://github.com/rg3/youtube-dl/issues/662 | ||||
| if sys.version_info < (2, 7, 3): | ||||
|     AttrParser.parse_endtag = (lambda self, i: | ||||
|         i + len("</scr'+'ipt>") | ||||
|         if self.rawdata[i:].startswith("</scr'+'ipt>") | ||||
|         else compat_html_parser.HTMLParser.parse_endtag(self, i)) | ||||
|  | ||||
| def get_element_by_id(id, html): | ||||
|     """Return the content of the tag with the specified ID in the passed HTML document""" | ||||
|     return get_element_by_attribute("id", id, html) | ||||
|  | ||||
| def get_element_by_attribute(attribute, value, html): | ||||
|     """Return the content of the tag with the specified attribute in the passed HTML document""" | ||||
|     parser = AttrParser(attribute, value) | ||||
|     try: | ||||
|         parser.loads(html) | ||||
|     except compat_html_parser.HTMLParseError: | ||||
|         pass | ||||
|     return parser.get_result() | ||||
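A quick sketch on a toy document (the markup is made up):

    page = u'<div class="header">nav</div><div id="description">A <b>short</b> blurb</div>'
    print(get_element_by_id(u'description', page))               # -> A <b>short</b> blurb
    print(get_element_by_attribute(u'class', u'header', page))   # -> nav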
|  | ||||
|  | ||||
| def clean_html(html): | ||||
|     """Clean an HTML snippet into a readable string""" | ||||
|     # Newline vs <br /> | ||||
|     html = html.replace('\n', ' ') | ||||
|     html = re.sub(r'\s*<\s*br\s*/?\s*>\s*', '\n', html) | ||||
|     html = re.sub(r'<\s*/\s*p\s*>\s*<\s*p[^>]*>', '\n', html) | ||||
|     # Strip html tags | ||||
|     html = re.sub('<.*?>', '', html) | ||||
|     # Replace html entities | ||||
|     html = unescapeHTML(html) | ||||
|     return html | ||||
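For instance (the output is the readable text, one line per paragraph or line break):

    print(clean_html(u'<p>First line<br/>Second line</p>\n<p>Next &amp; last</p>'))
    # First line
    # Second line
    # Next & last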
|  | ||||
|  | ||||
| def sanitize_open(filename, open_mode): | ||||
|     """Try to open the given filename, and slightly tweak it if this fails. | ||||
|  | ||||
|     Attempts to open the given filename. If this fails, it tries to change | ||||
|     the filename slightly, step by step, until it's either able to open it | ||||
|     or it fails and raises a final exception, like the standard open() | ||||
|     function. | ||||
|  | ||||
|     It returns the tuple (stream, definitive_file_name). | ||||
|     """ | ||||
|     try: | ||||
|         if filename == u'-': | ||||
|             if sys.platform == 'win32': | ||||
|                 import msvcrt | ||||
|                 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY) | ||||
|             return (sys.stdout, filename) | ||||
|         stream = open(encodeFilename(filename), open_mode) | ||||
|         return (stream, filename) | ||||
|     except (IOError, OSError) as err: | ||||
|         # In case of error, try to remove win32 forbidden chars | ||||
|         filename = re.sub(u'[/<>:"\\|\\\\?\\*]', u'#', filename) | ||||
|  | ||||
|         # An exception here should be caught in the caller | ||||
|         stream = open(encodeFilename(filename), open_mode) | ||||
|         return (stream, filename) | ||||
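Typical use, with a made-up file name:

    stream, final_name = sanitize_open(u'What? Where*.mp4', 'wb')
    # On filesystems that reject '?' or '*', final_name comes back with those
    # characters replaced by '#', e.g. u'What# Where#.mp4'; otherwise it is unchanged.
    stream.close()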
|  | ||||
|  | ||||
| def timeconvert(timestr): | ||||
|     """Convert RFC 2822 defined time string into system timestamp""" | ||||
|     timestamp = None | ||||
|     timetuple = email.utils.parsedate_tz(timestr) | ||||
|     if timetuple is not None: | ||||
|         timestamp = email.utils.mktime_tz(timetuple) | ||||
|     return timestamp | ||||
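For example:

    print(timeconvert(u'Wed, 02 Oct 2002 13:00:00 GMT'))   # -> 1033563600
    print(timeconvert(u'not a date'))                      # -> None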
|  | ||||
| def sanitize_filename(s, restricted=False, is_id=False): | ||||
|     """Sanitizes a string so it could be used as part of a filename. | ||||
|     If restricted is set, use a stricter subset of allowed characters. | ||||
|     Set is_id if this is not an arbitrary string, but an ID that should be kept if possible | ||||
|     """ | ||||
|     def replace_insane(char): | ||||
|         if char == '?' or ord(char) < 32 or ord(char) == 127: | ||||
|             return '' | ||||
|         elif char == '"': | ||||
|             return '' if restricted else '\'' | ||||
|         elif char == ':': | ||||
|             return '_-' if restricted else ' -' | ||||
|         elif char in '\\/|*<>': | ||||
|             return '_' | ||||
|         if restricted and (char in '!&\'()[]{}$;`^,#' or char.isspace()): | ||||
|             return '_' | ||||
|         if restricted and ord(char) > 127: | ||||
|             return '_' | ||||
|         return char | ||||
|  | ||||
|     result = u''.join(map(replace_insane, s)) | ||||
|     if not is_id: | ||||
|         while '__' in result: | ||||
|             result = result.replace('__', '_') | ||||
|         result = result.strip('_') | ||||
|         # Common case of "Foreign band name - English song title" | ||||
|         if restricted and result.startswith('-_'): | ||||
|             result = result[2:] | ||||
|         if not result: | ||||
|             result = '_' | ||||
|     return result | ||||
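For example (the second form is roughly what the restricted mode behind --restrict-filenames produces):

    print(sanitize_filename(u'AC/DC: Back In Black?'))
    # -> AC_DC - Back In Black    ('/' -> '_', ':' -> ' -', '?' dropped)
    print(sanitize_filename(u'AC/DC: Back In Black?', restricted=True))
    # -> AC_DC_-_Back_In_Black    (spaces and most punctuation become '_')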
|  | ||||
| def orderedSet(iterable): | ||||
|     """ Remove all duplicates from the input iterable """ | ||||
|     res = [] | ||||
|     for el in iterable: | ||||
|         if el not in res: | ||||
|             res.append(el) | ||||
|     return res | ||||
|  | ||||
| def unescapeHTML(s): | ||||
|     """ | ||||
|     @param s a string | ||||
|     """ | ||||
|     assert type(s) == type(u'') | ||||
|  | ||||
|     result = re.sub(u'(?u)&(.+?);', htmlentity_transform, s) | ||||
|     return result | ||||
|  | ||||
| def encodeFilename(s): | ||||
|     """ | ||||
|     @param s The name of the file | ||||
|     """ | ||||
|  | ||||
|     assert type(s) == type(u'') | ||||
|  | ||||
|     # Python 3 has a Unicode API | ||||
|     if sys.version_info >= (3, 0): | ||||
|         return s | ||||
|  | ||||
|     if sys.platform == 'win32' and sys.getwindowsversion()[0] >= 5: | ||||
|         # Pass u'' directly to use Unicode APIs on Windows 2000 and up | ||||
|         # (Detecting Windows NT 4 is tricky because 'major >= 4' would | ||||
|         # match Windows 9x series as well. Besides, NT 4 is obsolete.) | ||||
|         return s | ||||
|     else: | ||||
|         encoding = sys.getfilesystemencoding() | ||||
|         if encoding is None: | ||||
|             encoding = 'utf-8' | ||||
|         return s.encode(encoding, 'ignore') | ||||
|  | ||||
|  | ||||
| class ExtractorError(Exception): | ||||
|     """Error during info extraction.""" | ||||
|     def __init__(self, msg, tb=None): | ||||
|         """ tb, if given, is the original traceback (so that it can be printed out). """ | ||||
|         super(ExtractorError, self).__init__(msg) | ||||
|         self.traceback = tb | ||||
|  | ||||
|     def format_traceback(self): | ||||
|         if self.traceback is None: | ||||
|             return None | ||||
|         return u''.join(traceback.format_tb(self.traceback)) | ||||
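A sketch of how an extractor might raise it so the original traceback is preserved (the pattern and message are made up; webpage is assumed to hold the downloaded HTML):

    try:
        title = re.search(r'<title>(.*?)</title>', webpage).group(1)
    except AttributeError:
        raise ExtractorError(u'Unable to extract title', tb=sys.exc_info()[2])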
|  | ||||
|  | ||||
| class DownloadError(Exception): | ||||
|     """Download Error exception. | ||||
|  | ||||
|     This exception may be thrown by FileDownloader objects if they are not | ||||
|     configured to continue on errors. They will contain the appropriate | ||||
|     error message. | ||||
|     """ | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class SameFileError(Exception): | ||||
|     """Same File exception. | ||||
|  | ||||
|     This exception will be thrown by FileDownloader objects if they detect | ||||
|     multiple files would have to be downloaded to the same file on disk. | ||||
|     """ | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class PostProcessingError(Exception): | ||||
|     """Post Processing exception. | ||||
|  | ||||
|     This exception may be raised by PostProcessor's .run() method to | ||||
|     indicate an error in the postprocessing task. | ||||
|     """ | ||||
|     def __init__(self, msg): | ||||
|         self.msg = msg | ||||
|  | ||||
| class MaxDownloadsReached(Exception): | ||||
|     """ --max-downloads limit has been reached. """ | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class UnavailableVideoError(Exception): | ||||
|     """Unavailable Format exception. | ||||
|  | ||||
|     This exception will be thrown when a video is requested | ||||
|     in a format that is not available for that video. | ||||
|     """ | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class ContentTooShortError(Exception): | ||||
|     """Content Too Short exception. | ||||
|  | ||||
|     This exception may be raised by FileDownloader objects when a file they | ||||
|     download is too small for what the server announced first, indicating | ||||
|     the connection was probably interrupted. | ||||
|     """ | ||||
|     # Both in bytes | ||||
|     downloaded = None | ||||
|     expected = None | ||||
|  | ||||
|     def __init__(self, downloaded, expected): | ||||
|         self.downloaded = downloaded | ||||
|         self.expected = expected | ||||
|  | ||||
| class YoutubeDLHandler(compat_urllib_request.HTTPHandler): | ||||
|     """Handler for HTTP requests and responses. | ||||
|  | ||||
|     This class, when installed with an OpenerDirector, automatically adds | ||||
|     the standard headers to every HTTP request and handles gzipped and | ||||
|     deflated responses from web servers. If compression is to be avoided in | ||||
|     a particular request, the original request in the program code only has | ||||
|     to include the HTTP header "Youtubedl-No-Compression", which will be | ||||
|     removed before making the real request. | ||||
|  | ||||
|     Part of this code was copied from: | ||||
|  | ||||
|     http://techknack.net/python-urllib2-handlers/ | ||||
|  | ||||
|     Andrew Rowls, the author of that code, agreed to release it to the | ||||
|     public domain. | ||||
|     """ | ||||
|  | ||||
|     @staticmethod | ||||
|     def deflate(data): | ||||
|         try: | ||||
|             return zlib.decompress(data, -zlib.MAX_WBITS) | ||||
|         except zlib.error: | ||||
|             return zlib.decompress(data) | ||||
|  | ||||
|     @staticmethod | ||||
|     def addinfourl_wrapper(stream, headers, url, code): | ||||
|         if hasattr(compat_urllib_request.addinfourl, 'getcode'): | ||||
|             return compat_urllib_request.addinfourl(stream, headers, url, code) | ||||
|         ret = compat_urllib_request.addinfourl(stream, headers, url) | ||||
|         ret.code = code | ||||
|         return ret | ||||
|  | ||||
|     def http_request(self, req): | ||||
|         for h,v in std_headers.items(): | ||||
|             if h in req.headers: | ||||
|                 del req.headers[h] | ||||
|             req.add_header(h, v) | ||||
|         if 'Youtubedl-no-compression' in req.headers: | ||||
|             if 'Accept-encoding' in req.headers: | ||||
|                 del req.headers['Accept-encoding'] | ||||
|             del req.headers['Youtubedl-no-compression'] | ||||
|         if 'Youtubedl-user-agent' in req.headers: | ||||
|             if 'User-agent' in req.headers: | ||||
|                 del req.headers['User-agent'] | ||||
|             req.headers['User-agent'] = req.headers['Youtubedl-user-agent'] | ||||
|             del req.headers['Youtubedl-user-agent'] | ||||
|         return req | ||||
|  | ||||
|     def http_response(self, req, resp): | ||||
|         old_resp = resp | ||||
|         # gzip | ||||
|         if resp.headers.get('Content-encoding', '') == 'gzip': | ||||
|             gz = gzip.GzipFile(fileobj=io.BytesIO(resp.read()), mode='r') | ||||
|             resp = self.addinfourl_wrapper(gz, old_resp.headers, old_resp.url, old_resp.code) | ||||
|             resp.msg = old_resp.msg | ||||
|         # deflate | ||||
|         if resp.headers.get('Content-encoding', '') == 'deflate': | ||||
|             gz = io.BytesIO(self.deflate(resp.read())) | ||||
|             resp = self.addinfourl_wrapper(gz, old_resp.headers, old_resp.url, old_resp.code) | ||||
|             resp.msg = old_resp.msg | ||||
|         return resp | ||||
|  | ||||
|     https_request = http_request | ||||
|     https_response = http_response | ||||
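In rough terms the handler is installed like this (the URL is a placeholder):

    opener = compat_urllib_request.build_opener(YoutubeDLHandler())
    req = compat_urllib_request.Request(
        'http://example.com/page', headers={'Youtubedl-No-Compression': 'True'})
    page = opener.open(req).read()   # std_headers added, gzip/deflate handled transparently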
2  youtube_dl/version.py  Normal file
| @@ -0,0 +1,2 @@ | ||||
|  | ||||
| __version__ = '2013.02.02' | ||||