Compare commits
868 Commits
Author | SHA1 | Date |
---|---|---|
Harald Welte | 8bb59f081a | |
Holger Hans Peter Freyther | 5075145814 | |
Holger Hans Peter Freyther | 74b543bbb1 | |
Holger Hans Peter Freyther | 8062967cf3 | |
Holger Hans Peter Freyther | 0b2ae79dad | |
Holger Hans Peter Freyther | 83edf1dce4 | |
Harald Welte | e9ead4c0f3 | |
Harald Welte | e003221b02 | |
Harald Welte | 5cf873f499 | |
Harald Welte | 3c99769de2 | |
Harald Welte | a2a2d67e1a | |
Harald Welte | b16ceec77f | |
Harald Welte | 7fa273ffc2 | |
Holger Hans Peter Freyther | 8fab0f3222 | |
Holger Hans Peter Freyther | 4f6d4893ce | |
Holger Hans Peter Freyther | 44c4b09e83 | |
Holger Hans Peter Freyther | 2f86725324 | |
Holger Hans Peter Freyther | 57cc5e9c58 | |
Holger Hans Peter Freyther | f608e7e4f9 | |
Holger Hans Peter Freyther | ffce9e7f87 | |
Holger Hans Peter Freyther | 1a9dd62e22 | |
Holger Hans Peter Freyther | f79726fdcc | |
Holger Hans Peter Freyther | f5d3fba955 | |
Holger Hans Peter Freyther | 03fd342e14 | |
Holger Hans Peter Freyther | 69d9a0609d | |
Holger Hans Peter Freyther | 6e7383839c | |
Holger Hans Peter Freyther | 6453080cef | |
Holger Hans Peter Freyther | 57c0346f56 | |
Holger Hans Peter Freyther | 040f4432f5 | |
Holger Hans Peter Freyther | fea919fa5d | |
Holger Hans Peter Freyther | 5b35766651 | |
Holger Hans Peter Freyther | 3f228b8e6e | |
Holger Hans Peter Freyther | 16f9bbb288 | |
Holger Hans Peter Freyther | 673d53bbf5 | |
Holger Hans Peter Freyther | 01e96dd240 | |
Holger Hans Peter Freyther | b6dc4b91a7 | |
Holger Hans Peter Freyther | e9d0aa7cb4 | |
Holger Hans Peter Freyther | bd76a6f5b6 | |
Holger Hans Peter Freyther | 286a9a90cf | |
Holger Hans Peter Freyther | 212ab87663 | |
Holger Hans Peter Freyther | a7946542c8 | |
Holger Hans Peter Freyther | 0c797643dd | |
Holger Hans Peter Freyther | 0ca774fc19 | |
Holger Hans Peter Freyther | f59fe37872 | |
Holger Hans Peter Freyther | 94a82eeedd | |
Holger Hans Peter Freyther | 5732aadd15 | |
Harald Welte | 49ca632e9a | |
Holger Hans Peter Freyther | a144d1c3c0 | |
Harald Welte | 0c79987641 | |
Holger Hans Peter Freyther | 2761cd97f4 | |
Holger Hans Peter Freyther | e89d7ce77d | |
Holger Hans Peter Freyther | 1bca046d23 | |
Holger Hans Peter Freyther | c83652a73a | |
Holger Hans Peter Freyther | 3105854fcb | |
Holger Hans Peter Freyther | 8becd5a4bf | |
Holger Hans Peter Freyther | f9d158b747 | |
Holger Hans Peter Freyther | 96c91eb8d6 | |
Holger Hans Peter Freyther | 9c3b75040a | |
Holger Hans Peter Freyther | 5e475b6b99 | |
Holger Hans Peter Freyther | 8b40a0c2f9 | |
Holger Hans Peter Freyther | 4214e03720 | |
Harald Welte | fcfffacfa8 | |
Harald Welte | 920b27acba | |
Holger Hans Peter Freyther | 336c4212e5 | |
Holger Hans Peter Freyther | 40075f5141 | |
Holger Hans Peter Freyther | 661e10551b | |
Holger Hans Peter Freyther | 4a35eb9273 | |
Holger Hans Peter Freyther | 7782174da7 | |
Harald Welte | 558d76a412 | |
Holger Hans Peter Freyther | 8e4991163e | |
Christopher Larson | 3a01a4f2bc | |
Holger Hans Peter Freyther | cd0d240314 | |
Holger Hans Peter Freyther | 7059d16861 | |
Holger Hans Peter Freyther | d01ad496af | |
Harald Welte | 3cf0f9d338 | |
Harald Welte | 27edfb7481 | |
Holger Hans Peter Freyther | 6e5e1f8b9e | |
Holger Hans Peter Freyther | d9a117ed21 | |
Holger Hans Peter Freyther | 84f23b2203 | |
Harald Welte | 1c3982289e | |
Harald Welte | c793b65258 | |
Harald Welte | 2bae25e4cd | |
Holger Hans Peter Freyther | 7cb8c8d1b5 | |
Harald Welte | 410d086a5a | |
Holger Hans Peter Freyther | 87f47c8586 | |
Harald Welte | 23689a489f | |
Holger Hans Peter Freyther | 7389f6f2b1 | |
Holger Hans Peter Freyther | b7f2cddc23 | |
Holger Hans Peter Freyther | 2257338267 | |
Holger Hans Peter Freyther | 63085244df | |
Holger Hans Peter Freyther | a5d6d983ec | |
Holger Hans Peter Freyther | 41a683d627 | |
Holger Hans Peter Freyther | 99a6840eb4 | |
Holger Hans Peter Freyther | de0f9b5f3d | |
Holger Hans Peter Freyther | aea5136f30 | |
Holger Hans Peter Freyther | ec29824f84 | |
Holger Hans Peter Freyther | 1b7bb4ae95 | |
Holger Hans Peter Freyther | 85d5ea4aae | |
Holger Hans Peter Freyther | 675a2f814f | |
Holger Hans Peter Freyther | 0f3ea35f17 | |
Holger Hans Peter Freyther | a7c614b3dd | |
Holger Hans Peter Freyther | 3e7dbdcdba | |
Holger Hans Peter Freyther | 90d152fcff | |
Holger Hans Peter Freyther | 7aa59c430e | |
Harald Welte | 35ff59937b | |
Harald Welte | 209d4ec0be | |
Harald Welte | cbb7200e61 | |
Harald Welte | fc1c332a7e | |
Harald Welte | 8d7bfd23e9 | |
Harald Welte | 85a4948d4b | |
Harald Welte | 6c31e6c557 | |
Holger Hans Peter Freyther | b6780a5826 | |
Harald Welte | 29263f1625 | |
Harald Welte | b7d4da72b6 | |
Harald Welte | 2b21fe5ad9 | |
Harald Welte | b67bed4d3a | |
Harald Welte | c71b4ead63 | |
Holger Hans Peter Freyther | 89df7559e5 | |
Holger Hans Peter Freyther | 78a632ea52 | |
Harald Welte | b1be0d9702 | |
Harald Welte | 5b6ad0ce9d | |
Harald Welte | 766aabc563 | |
Holger Hans Peter Freyther | 52ddf4d91b | |
Holger Hans Peter Freyther | 0473bf96d3 | |
Harald Welte | 9b48f833cf | |
Harald Welte | e518a3224a | |
Harald Welte | ad495228e2 | |
Holger Hans Peter Freyther | 2b59ca73fe | |
Holger Hans Peter Freyther | c4c453a1db | |
Holger Hans Peter Freyther | e9379558e3 | |
Holger Hans Peter Freyther | 9383d3a2a6 | |
Harald Welte | cf29168330 | |
Holger Hans Peter Freyther | cb0096d041 | |
Harald Welte | bcf7764458 | |
Holger Hans Peter Freyther | ffe5723a3a | |
Harald Welte | 0925b23f06 | |
Holger Hans Peter Freyther | a060c5d2ab | |
Harald Welte | 489f392cc6 | |
Holger Hans Peter Freyther | 487211e30f | |
Holger Hans Peter Freyther | 2deddb9f09 | |
Harald Welte | 7a9a2ddecb | |
Holger Hans Peter Freyther | 73b77fe9ba | |
Harald Welte | d0d30fee99 | |
Harald Welte | 4aad63620b | |
Holger Hans Peter Freyther | 87a6d04505 | |
Holger Hans Peter Freyther | 8523266bce | |
Harald Welte | 557e1b47ba | |
Holger Hans Peter Freyther | cd37803dab | |
Holger Hans Peter Freyther | 267d7a4dab | |
Holger Hans Peter Freyther | c88a4fb4c4 | |
Holger Hans Peter Freyther | b726969f84 | |
Holger Hans Peter Freyther | 0b9c954d0b | |
Holger Hans Peter Freyther | fd02a175f6 | |
Holger Hans Peter Freyther | db8e4ec0e4 | |
Holger Hans Peter Freyther | 9bf7206ce7 | |
Holger Hans Peter Freyther | 8b61400347 | |
Holger Hans Peter Freyther | 7370f20647 | |
Holger Hans Peter Freyther | c7dc800cc5 | |
Holger Hans Peter Freyther | 5368ea68f1 | |
Harald Welte | aff7df713e | |
Holger Hans Peter Freyther | 738df59de8 | |
Holger Hans Peter Freyther | e8e126f1eb | |
Holger Hans Peter Freyther | b92f71e94b | |
Harald Welte | d8a23bc008 | |
Harald Welte | be7111d6c6 | |
Harald Welte | 5b7eff1c95 | |
Harald Welte | 350981f9a8 | |
Harald Welte | 8c57364452 | |
Harald Welte | df64e62c45 | |
Harald Welte | e74231478d | |
Harald Welte | 98b850cdb7 | |
Holger Hans Peter Freyther | a8c239f2a4 | |
Holger Hans Peter Freyther | 38d2ca2f36 | |
Holger Hans Peter Freyther | 8431d916a5 | |
Holger Hans Peter Freyther | f9756a1ec6 | |
Holger Hans Peter Freyther | 1c5bff88f1 | |
Holger Hans Peter Freyther | f63bb56056 | |
Holger Hans Peter Freyther | 671337f9c8 | |
Holger Hans Peter Freyther | 99d3462a72 | |
Holger Hans Peter Freyther | 99fabcdfd6 | |
Holger Hans Peter Freyther | bd02fdb4a0 | |
Holger Hans Peter Freyther | 0f9b8cb7e6 | |
Holger Hans Peter Freyther | 6f27e12588 | |
Holger Hans Peter Freyther | f8d915a370 | |
Holger Hans Peter Freyther | a02d4dfabf | |
Holger Hans Peter Freyther | 790799b138 | |
Holger Hans Peter Freyther | 3e4ffca0b3 | |
Holger Hans Peter Freyther | 02f61cf522 | |
Holger Hans Peter Freyther | f36b6907a9 | |
Harald Welte | 55b7b92f34 | |
Harald Welte | a1aad22e4e | |
Harald Welte | 638474f669 | |
Holger Hans Peter Freyther | b272d37c60 | |
Holger Hans Peter Freyther | 9e2fce0fba | |
Holger Hans Peter Freyther | 91523445fc | |
Holger Hans Peter Freyther | 65e5ee8e4e | |
Holger Hans Peter Freyther | 12a1b8ea4b | |
Holger Hans Peter Freyther | 9e130f1ac1 | |
Holger Hans Peter Freyther | dbb57efcd1 | |
Holger Hans Peter Freyther | d6fd5c09d0 | |
Holger Hans Peter Freyther | ee7b9cf4fa | |
Holger Hans Peter Freyther | 9106dd27b9 | |
Holger Hans Peter Freyther | 67ff3d4b8b | |
Holger Hans Peter Freyther | 61eb982980 | |
Holger Hans Peter Freyther | 16aafee394 | |
Holger Hans Peter Freyther | 653bccdde1 | |
Holger Hans Peter Freyther | 8d053fb3ae | |
Harald Welte | b13eaf470f | |
Holger Hans Peter Freyther | 3279c6791f | |
Holger Hans Peter Freyther | 6f54e1b374 | |
Harald Welte | d1e0a1f78d | |
Harald Welte | 6ae5eefa93 | |
Harald Welte | d07870af73 | |
Holger Hans Peter Freyther | 0f4626d2d8 | |
Jan Luebbe | 5e17d8e537 | |
Holger Hans Peter Freyther | a5c48367a6 | |
Holger Hans Peter Freyther | 2ae26ee3fa | |
Holger Hans Peter Freyther | 3be74372be | |
Holger Hans Peter Freyther | 46ea81ade5 | |
Holger Hans Peter Freyther | aeb390d1cf | |
Holger Hans Peter Freyther | 6a4abb5830 | |
Holger Hans Peter Freyther | b8b0dff3d4 | |
Holger Hans Peter Freyther | 4f78b6ef78 | |
Holger Hans Peter Freyther | 97ba83f17a | |
Holger Hans Peter Freyther | 4f61482461 | |
Holger Hans Peter Freyther | a6cbcf75d5 | |
Holger Hans Peter Freyther | 25a726eea5 | |
Holger Hans Peter Freyther | 1579cdf0fe | |
Holger Hans Peter Freyther | c0cbc30d4c | |
Holger Hans Peter Freyther | d6415c8069 | |
Holger Hans Peter Freyther | a4331e9bee | |
Holger Hans Peter Freyther | 138ac3bc45 | |
Holger Hans Peter Freyther | c35fa18841 | |
Holger Hans Peter Freyther | d9150d375c | |
Holger Hans Peter Freyther | fd604f8497 | |
Holger Hans Peter Freyther | bbbc805a2d | |
Holger Hans Peter Freyther | ba44e70c38 | |
Holger Hans Peter Freyther | f5b92b4bf2 | |
Holger Hans Peter Freyther | 2d8f097946 | |
Holger Hans Peter Freyther | f88c9a6fbd | |
Holger Hans Peter Freyther | 62e7f7f1bc | |
Holger Hans Peter Freyther | d03aa9c516 | |
Harald Welte | 2be99ac429 | |
Harald Welte | 6734e9a695 | |
Harald Welte | 5928a0374a | |
Holger Hans Peter Freyther | 8479f83f39 | |
Harald Welte | 73295758f9 | |
Holger Hans Peter Freyther | a641873503 | |
Holger Hans Peter Freyther | 9805fb9f81 | |
Holger Hans Peter Freyther | 50dba82586 | |
Holger Hans Peter Freyther | 4e2ab7017f | |
Holger Hans Peter Freyther | 9f0744ce76 | |
Holger Hans Peter Freyther | 21c475ae4a | |
Holger Hans Peter Freyther | f4320dd67f | |
Holger Hans Peter Freyther | 1f71735516 | |
Holger Hans Peter Freyther | fe53a9d4aa | |
Holger Hans Peter Freyther | 9416f178b1 | |
Holger Hans Peter Freyther | d838ac6e80 | |
Holger Hans Peter Freyther | e2ab56be8f | |
Holger Hans Peter Freyther | e0bf4c7fea | |
Holger Hans Peter Freyther | 4166201d03 | |
Holger Hans Peter Freyther | a6ae74f721 | |
Holger Hans Peter Freyther | 94fa2d9623 | |
Holger Hans Peter Freyther | be68ad33d2 | |
Holger Hans Peter Freyther | 4cfac93114 | |
Holger Hans Peter Freyther | 4df5621826 | |
Holger Hans Peter Freyther | 5d46944934 | |
Holger Hans Peter Freyther | d5f6ae1142 | |
Holger Hans Peter Freyther | e423b30a83 | |
Holger Hans Peter Freyther | ae891be609 | |
Holger Hans Peter Freyther | 4d8fef32c9 | |
Holger Hans Peter Freyther | a6f1cb82a7 | |
Holger Hans Peter Freyther | 33fe48f3b7 | |
Holger Hans Peter Freyther | 794c0e73e6 | |
Holger Hans Peter Freyther | d1d9f99eb6 | |
Holger Hans Peter Freyther | e568fa2c27 | |
Holger Hans Peter Freyther | e6ec2228da | |
Holger Hans Peter Freyther | 9257e4b8a1 | |
Holger Hans Peter Freyther | 0f6a89c788 | |
Holger Hans Peter Freyther | c1625ee284 | |
Holger Hans Peter Freyther | 40355ff62e | |
Holger Hans Peter Freyther | a7befd603b | |
Holger Hans Peter Freyther | 88c1293c49 | |
Jan Luebbe | e94c542d7c | |
Holger Hans Peter Freyther | 90a211eb56 | |
Elizabeth Flanagan | 06072024be | |
Robert Yang | 107ec95659 | |
Richard Purdie | af3e5039e8 | |
Richard Purdie | 08d70734d5 | |
Holger Hans Peter Freyther | f3f2d7149d | |
Yi Zhao | 164a4d1bac | |
Richard Purdie | 7e0dd59e30 | |
Holger Hans Peter Freyther | 4fed144378 | |
Holger Hans Peter Freyther | 914296ad20 | |
Holger Hans Peter Freyther | c0663ce337 | |
Holger Hans Peter Freyther | 420ea289a2 | |
Holger Hans Peter Freyther | d9a77eb641 | |
Holger Hans Peter Freyther | 17acc4a562 | |
Holger Hans Peter Freyther | 45580f23e8 | |
Holger Hans Peter Freyther | 62ac580831 | |
Holger Hans Peter Freyther | be8f70e8a1 | |
Holger Hans Peter Freyther | c29b5d8bb4 | |
Holger Hans Peter Freyther | 03c9643695 | |
Harald Welte | ce6f4b0bce | |
Holger Hans Peter Freyther | 9f86fa35a2 | |
Harald Welte | 242922ceae | |
Harald Welte | a79df5e650 | |
Harald Welte | df89bf00e7 | |
Harald Welte | a3c1f02557 | |
Lianhao Lu | aca161f8a0 | |
Richard Purdie | 94c8d01eba | |
Richard Purdie | e1e0dd932b | |
Richard Purdie | 6c335846d9 | |
Beth 'pidge' Flanagan | 774f93e8d3 | |
Joshua Lock | 535cfa538b | |
Joshua Lock | ac63b3f8ef | |
Scott Garman | 4039b5b97c | |
Richard Purdie | 67334bfb26 | |
Richard Purdie | 36e13dd42f | |
Richard Purdie | de485f4973 | |
Darren Hart | 56310cbc4c | |
Scott Garman | 68cd8deadc | |
Zhai Edwin | b2a0243f05 | |
Otavio Salvador | a99b7d39dc | |
Otavio Salvador | 755508c423 | |
Richard Purdie | adbf38414e | |
Richard Purdie | c3be61e204 | |
Richard Purdie | 490753f440 | |
Richard Purdie | f3fc5e1e3f | |
Richard Purdie | e2c5e5a513 | |
Richard Purdie | c5a9efca96 | |
Holger Hans Peter Freyther | 5508643b52 | |
Holger Hans Peter Freyther | 910852c052 | |
Dexuan Cui | 842d3ece07 | |
Holger Hans Peter Freyther | 7aaa4cc880 | |
Holger Hans Peter Freyther | 6d69dba610 | |
Holger Hans Peter Freyther | aed232262e | |
Holger Hans Peter Freyther | 6dcaf7d614 | |
Harald Welte | 77f660aa0d | |
Holger Hans Peter Freyther | 39d7350b90 | |
Harald Welte | 967184aef7 | |
Holger Hans Peter Freyther | bdbd14a8d5 | |
Harald Welte | b9d489c543 | |
Holger Hans Peter Freyther | 2a378aa333 | |
Holger Hans Peter Freyther | f1ce2c13e5 | |
Holger Hans Peter Freyther | 999cf4843e | |
Holger Hans Peter Freyther | b863e59aa6 | |
Holger Hans Peter Freyther | 73bcda3f97 | |
Holger Hans Peter Freyther | dd6f9356b1 | |
Holger Hans Peter Freyther | 1e224b39ae | |
Holger Hans Peter Freyther | 39b09a8849 | |
Holger Hans Peter Freyther | 8caa70df0a | |
Holger Hans Peter Freyther | 5a11d46c4e | |
Holger Hans Peter Freyther | d24c97d308 | |
Saul Wold | 15905aec48 | |
Saul Wold | 2b92d9f6d3 | |
Holger Hans Peter Freyther | 96a864f527 | |
Richard Purdie | bfa48c3c09 | |
Otavio Salvador | ef6062981b | |
Scott Garman | f57eca6f28 | |
Saul Wold | 885ebdae10 | |
Darren Hart | 33561a5417 | |
Darren Hart | bb31c819be | |
Darren Hart | d1c5de9ccb | |
Darren Hart | 4274ebdd00 | |
Holger Hans Peter Freyther | e005f8d60e | |
Holger Hans Peter Freyther | fab01cc657 | |
Holger Hans Peter Freyther | 9d939cbe6c | |
Holger Hans Peter Freyther | aeca0842f3 | |
Elizabeth Flanagan | 0fbd6a1615 | |
Scott Rifenbark | bda8a084f5 | |
Beth Flanagan | 939ec1ca1e | |
Holger Hans Peter Freyther | e08238ed3d | |
Richard Purdie | 69b307523c | |
Joshua Lock | 6482c0e20d | |
Saul Wold | ac9c62c907 | |
Saul Wold | 806c23ef2e | |
Saul Wold | 4c496a970f | |
Richard Purdie | fa6eb32a5a | |
Joshua Lock | eaec7e9624 | |
Wenzong Fan | e6ea83fece | |
Nitin A Kamble | 613e985811 | |
Wenzong Fan | b900d54f57 | |
Wenzong Fan | 83e5279d62 | |
Wenzong Fan | b36cde2308 | |
Wenzong Fan | c1a2249c96 | |
Joshua Lock | e3afb1ebc8 | |
Saul Wold | 686345f1d0 | |
Saul Wold | 7b15a9372c | |
Saul Wold | b52792d84d | |
Nitin A Kamble | a684aa1df4 | |
Holger Hans Peter Freyther | 904300e21a | |
Matthew McClintock | 69a3fba2aa | |
Jessica Zhang | c6ec5a0d9e | |
Lianhao Lu | de68393270 | |
Richard Purdie | 12e5797e51 | |
Bruce Ashfield | 6c65263f8d | |
Joshua Lock | 32f0a45c33 | |
Jean-François Dagenais | 726f3bce5a | |
Saul Wold | 75f253d7d2 | |
Wolfgang Denk | a5c04850e6 | |
Joshua Lock | cef4500611 | |
Saul Wold | df2da07184 | |
Matthew McClintock | 77912d65c7 | |
Saul Wold | 878425f147 | |
Richard Purdie | fa9ad15e41 | |
Nitin A Kamble | 961f75d11b | |
Matthew McClintock | 60c5ef3508 | |
Matthew McClintock | e5e7a913c2 | |
Richard Purdie | 165e39a0bb | |
Khem Raj | 09d5966e46 | |
Richard Purdie | 0bd433ebf5 | |
Richard Purdie | c2e003ecd5 | |
Richard Purdie | 9f51e226dc | |
Richard Purdie | 681499ebfe | |
Richard Purdie | 5d96094939 | |
Richard Purdie | 10d9e0805e | |
Richard Purdie | c1c6613ddd | |
Richard Purdie | 5db4eaac2d | |
Richard Purdie | f99f36f637 | |
Jiajun Xu | 7705d9a8cc | |
Richard Purdie | bcec98bf1c | |
Matthew McClintock | 0d9809c4ec | |
Saul Wold | dcf64630f8 | |
Saul Wold | fa610f7f20 | |
Paul Eggleton | d97ad36d90 | |
Paul Eggleton | de7377a170 | |
Paul Eggleton | c471ec56b4 | |
Mei Lei | 9d55534cc7 | |
Richard Purdie | 54f4e9b66c | |
Zhai Edwin | 3e783002b3 | |
Martin Jansa | 935678cbe1 | |
Matthew McClintock | ee75b5020b | |
Richard Purdie | 5f21a24580 | |
Richard Purdie | 100002e4c6 | |
Richard Purdie | 71824019fb | |
Matthew McClintock | 3400b3d2df | |
Richard Purdie | 745d83f968 | |
Richard Purdie | 340a680de2 | |
Matthew McClintock | 3048bd79b3 | |
Michael Brown | ef37926f31 | |
Richard Purdie | 4c30bcfbfe | |
Phil Blundell | 709ad80662 | |
Martin Jansa | 08a834a08a | |
Martin Jansa | 6c3dd24e59 | |
Martin Jansa | 66d6c031b0 | |
Martin Jansa | 957882caef | |
Khem Raj | 8781450256 | |
Martin Jansa | 9d23f215a0 | |
Richard Purdie | 2c1a0b7d32 | |
Richard Purdie | 9e52c53a5d | |
Richard Purdie | f812a2c912 | |
Richard Purdie | d3c848094f | |
Richard Purdie | 37d694ae80 | |
Otavio Salvador | e391e1a200 | |
Otavio Salvador | 199f985754 | |
Paul Eggleton | 395ffa8930 | |
Matthew McClintock | 90920546e4 | |
Koen Kooi | 1d9ec42166 | |
Koen Kooi | 57481984c9 | |
Mark Hatle | 05051d864d | |
Mark Hatle | 48ee7e9b3a | |
Mark Hatle | 1278cee687 | |
Mark Hatle | b5195d2739 | |
Mark Hatle | 7561770d43 | |
Paul Menzel | 03fbfe7cf1 | |
Darren Hart | 9faa58ecdc | |
Saul Wold | cc19812fb4 | |
Richard Purdie | 110d499544 | |
Scott Garman | 7fe64f43f4 | |
Scott Garman | 47007075d4 | |
Scott Garman | 9dc2193d31 | |
Scott Garman | 403d5e0b7d | |
Paul Eggleton | 5d9dfed5c4 | |
Paul Eggleton | 9924a6c72d | |
Tom Zanussi | ccf6077d4e | |
Tom Zanussi | 38978dc0b8 | |
Tom Zanussi | 9152ef8b1d | |
Otavio Salvador | 4e4521b5bf | |
Otavio Salvador | 501211d4d5 | |
Eric Bénard | 155aad308c | |
Dongxiao Xu | 11e383d24c | |
Joshua Lock | aff0c68b0f | |
Joshua Lock | 3b75e27536 | |
Joshua Lock | 5f3b7a7616 | |
Eric Bénard | fb8d219960 | |
Scott Garman | ae88920dec | |
Martin Jansa | 57c6f14828 | |
Martin Jansa | bd9a5e1b88 | |
Martin Jansa | 8614fcf709 | |
Martin Jansa | 2202d845ab | |
Koen Kooi | a55d8c6aa4 | |
Martin Jansa | b6312e2d51 | |
Dmitry Cherukhin | 5a41a612c9 | |
Kumar Gala | aaea770f1f | |
Elizabeth Flanagan | 6e4607f23a | |
Richard Purdie | 5a192f85d9 | |
Koen Kooi | 9ce56ec4ca | |
Koen Kooi | e47cfd447c | |
Saul Wold | fc2433de1d | |
Saul Wold | 8cf7c76ce1 | |
Saul Wold | 5d8269d28a | |
Saul Wold | 9f542cf856 | |
Holger Hans Peter Freyther | 398a0159a6 | |
Richard Purdie | 8ce627f9b1 | |
Richard Purdie | a7e5ad1268 | |
Richard Purdie | 8223a46ca0 | |
Richard Purdie | 1890a0f3b2 | |
Joshua Lock | 2dbcd4154c | |
Saul Wold | 0524f419cf | |
Saul Wold | a90c197e94 | |
Saul Wold | 21458bd419 | |
Saul Wold | 7e96247751 | |
Saul Wold | 07e2aa9b80 | |
Saul Wold | 5c37b7ea47 | |
Joshua Lock | 79081f46ec | |
Matthew McClintock | 4f9e333b05 | |
Julian Pidancet | dc09c258f0 | |
Khem Raj | 5de0f305f9 | |
Khem Raj | 52dc5edde3 | |
Otavio Salvador | 9d086cd151 | |
Otavio Salvador | 2edde1021f | |
Otavio Salvador | afc60481c7 | |
Khem Raj | eb94ba9052 | |
Khem Raj | 6fa445d50e | |
Anders Darander | 795843df09 | |
Khem Raj | 8a20492e8a | |
Khem Raj | 70ff3b6d98 | |
Lauri Hintsala | ba79e6f631 | |
Martin Jansa | 071d5de3f3 | |
Xiaofeng Yan | 1fa324c533 | |
Saul Wold | 958c7f773f | |
Saul Wold | 141240c409 | |
Samuel Stirtzel | 89e945be6a | |
Samuel Stirtzel | 7d30c2df87 | |
Wenzong Fan | 90a4f95d3d | |
Tom Rini | 53db004d24 | |
Christopher Larson | f7d5b31d6c | |
Richard Purdie | 35d3782099 | |
Matthew McClintock | 1080ef1105 | |
Matthew McClintock | 7bd151a4f3 | |
Martin Jansa | e1f53370ed | |
Martin Jansa | e708d0ab68 | |
Julian Pidancet | e6e867558b | |
Julian Pidancet | ab81049f37 | |
Jason Wessel | 0b2f036a81 | |
Richard Purdie | 6ed9f0763b | |
Saul Wold | 1e225af16e | |
Richard Purdie | 385365f689 | |
Richard Purdie | dec4fb1bee | |
Kumar Gala | ef1a8f21e0 | |
Richard Purdie | 0c1b16db4c | |
Tom Zanussi | 02c530f442 | |
Dmitry Eremin-Solenikov | df2fddf9cb | |
Martin Jansa | 6f0c0167c6 | |
Richard Purdie | 47c5f1c3bc | |
Otavio Salvador | 29a5cc693c | |
Denis Carikli | c3a1b97511 | |
Eric Bénard | f1369ae9fe | |
Matthew McClintock | 8620d997d4 | |
Matthew McClintock | 7d06a71c02 | |
Richard Purdie | 7c5028614b | |
Matthew McClintock | 6844fac9d5 | |
Saul Wold | 41b5ca8582 | |
Matthew McClintock | 7a1504dfe8 | |
Matthew McClintock | 062623f6ef | |
Matthew McClintock | 62ad5b81cb | |
Matthew McClintock | ada8ebb116 | |
Matthew McClintock | f1f2cbbc0d | |
Matthew McClintock | c434795edf | |
Matthew McClintock | 25330d9f38 | |
Richard Purdie | c1c5eb6866 | |
Richard Purdie | 51e089403a | |
Richard Purdie | 058ef489a0 | |
Khem Raj | 3eb7e626d0 | |
Andrew Gabbasov | 386e75b7f0 | |
Andrew Gabbasov | f72a801d51 | |
Andrew Gabbasov | 4ffc32566a | |
Paul Eggleton | 3f692305dc | |
Paul Eggleton | 4ff17dc89d | |
Saul Wold | 1a506c5dfd | |
Simon Busch | 84865e45ea | |
Richard Purdie | ae97dbe1db | |
Richard Purdie | edb2641243 | |
Richard Purdie | c8635bab0b | |
Richard Purdie | cd50451812 | |
Dmitry Eremin-Solenikov | 877979c8b5 | |
Dmitry Eremin-Solenikov | f69eca96d1 | |
lumag | c0a8c9b985 | |
Dmitry Eremin-Solenikov | 09ab224a2f | |
Dmitry Eremin-Solenikov | 0e9001afd5 | |
Dmitry Eremin-Solenikov | d63678cdfa | |
Khem Raj | 1af2581f0b | |
Richard Purdie | 9dcb176dc8 | |
Zhai Edwin | 64ba74deff | |
Mark Hatle | ff047d3a77 | |
Khem Raj | b137421cfc | |
Dmitry Eremin-Solenikov | 3a8590f105 | |
Dmitry Eremin-Solenikov | 3571525ab8 | |
Dmitry Eremin-Solenikov | 204762c531 | |
Daniel Lazzari | 394d340ab1 | |
Bruce Ashfield | 4bf5435d95 | |
Bruce Ashfield | 05dba88379 | |
Paul Eggleton | 1ab5a6851d | |
Paul Eggleton | 781866f64e | |
Joshua Lock | 702c428804 | |
Xiaofeng Yan | 5882121a94 | |
Joshua Lock | 6c2f754a0a | |
Joshua Lock | acd0bedbce | |
Joshua Lock | 90f8d53800 | |
Matthew McClintock | 23c6b49566 | |
Robert Yang | f204d16012 | |
Richard Purdie | 3796541746 | |
Richard Purdie | 0e676f74c5 | |
Richard Purdie | 26666187e3 | |
Matthew McClintock | c270f92b08 | |
Christopher Larson | 1670051a79 | |
Christopher Larson | c61f04c34e | |
Christopher Larson | 2b26745c70 | |
Christopher Larson | 28ca6cc34b | |
Christopher Larson | ada59bde67 | |
Richard Purdie | 9a68fb1364 | |
Richard Purdie | f87c92143e | |
Richard Purdie | f38e44bbb2 | |
Richard Purdie | 4d7f50382e | |
Matthew McClintock | 6803d97bdb | |
Matthew McClintock | 81ed10442b | |
Richard Purdie | 1a46002fad | |
Richard Purdie | 2747b2003e | |
Richard Purdie | 375297ea28 | |
Richard Purdie | c2662a5095 | |
Richard Purdie | da56e3df88 | |
Richard Purdie | 388dbe4928 | |
Richard Purdie | dbcce81f66 | |
Christopher Larson | 46ac868403 | |
Paul Eggleton | 6e1105e1e8 | |
Richard Purdie | 4494f59a26 | |
Joshua Lock | 13590b23c6 | |
Matthew McClintock | 2c3861ee68 | |
Matthew McClintock | 9cf7aabecf | |
Scott Rifenbark | 81d1a4aadf | |
Scott Rifenbark | 6578845f69 | |
Scott Rifenbark | b5a4e78df5 | |
Scott Rifenbark | 68b55c1e85 | |
Scott Rifenbark | 4234beb034 | |
Scott Rifenbark | ddb5143d9d | |
Scott Rifenbark | 25dcd673f5 | |
Scott Rifenbark | 1ad7977742 | |
Scott Rifenbark | fe40f117c1 | |
Scott Rifenbark | 0550d8c73e | |
Scott Rifenbark | bc821a2ab5 | |
Scott Rifenbark | aa72ed0b23 | |
Scott Rifenbark | e0a2bbd2a4 | |
Scott Rifenbark | 1a2454fcba | |
Scott Rifenbark | 92675a93ba | |
Scott Rifenbark | 1bafc89431 | |
Scott Rifenbark | dc785b64c1 | |
Scott Rifenbark | 257dbe8d39 | |
Scott Rifenbark | c81c4cb0c7 | |
Scott Rifenbark | da3edbd85b | |
Scott Rifenbark | 44aa4f320a | |
Scott Rifenbark | 38dbccd997 | |
Scott Rifenbark | 6c27a7b50e | |
Scott Rifenbark | 7ef3bc97b7 | |
Scott Rifenbark | 807b96f882 | |
Scott Rifenbark | 2add98ffc8 | |
Scott Rifenbark | ed7fe93178 | |
Scott Rifenbark | 397081ef41 | |
Scott Rifenbark | 570eeea297 | |
Scott Rifenbark | b9232eb2b4 | |
Scott Rifenbark | 405578286d | |
Scott Rifenbark | 1c937b6359 | |
Scott Rifenbark | 567200dcf2 | |
Scott Rifenbark | b4f5708c05 | |
Scott Rifenbark | b8cb28fc2f | |
Tom Zanussi | 495d37ab0b | |
Scott Rifenbark | 9d60cb9450 | |
Scott Rifenbark | 5592e80877 | |
Scott Rifenbark | 979ecf3eea | |
Scott Rifenbark | 770f5bb229 | |
Scott Rifenbark | 44211ed500 | |
Scott Rifenbark | ac715efc14 | |
Scott Rifenbark | b256ae8f80 | |
Scott Rifenbark | fa969ffb59 | |
Scott Rifenbark | e67311606e | |
Scott Rifenbark | b17aecd70a | |
Scott Rifenbark | 1cb265f575 | |
Scott Rifenbark | 31b7cac818 | |
Scott Rifenbark | 05738313c3 | |
Scott Rifenbark | 25cf1a65ec | |
Scott Rifenbark | 442730168e | |
Scott Rifenbark | 2ca5c8c03e | |
Scott Rifenbark | fa056279ea | |
Scott Rifenbark | 7ec098bedc | |
Scott Rifenbark | 68d048abfd | |
Scott Rifenbark | d5848aa719 | |
Scott Rifenbark | 9edf601d2d | |
Scott Rifenbark | 155d0deae8 | |
Scott Rifenbark | 85408dfd36 | |
Scott Rifenbark | 43fb63af31 | |
Scott Rifenbark | 8add7fccde | |
Scott Rifenbark | 01f5e6778c | |
Scott Rifenbark | 1ff81200ac | |
Scott Rifenbark | d2f1ca8cba | |
Scott Rifenbark | caab52f6cc | |
Scott Rifenbark | b4eb195b34 | |
Scott Rifenbark | 3dbabb693d | |
Scott Rifenbark | 5dd34a717e | |
Scott Rifenbark | e7cfb3b469 | |
Scott Rifenbark | c2494d3014 | |
Scott Rifenbark | 9d87cd9952 | |
Scott Rifenbark | 1851a96b47 | |
Scott Rifenbark | efd2d7ee05 | |
Scott Rifenbark | b16bc3d277 | |
Holger Hans Peter Freyther | a56c10d89a | |
Holger Hans Peter Freyther | 75abc06f34 | |
Holger Hans Peter Freyther | 23dbc00a4e | |
Holger Hans Peter Freyther | 73bcd9a75c | |
Holger Hans Peter Freyther | 6e64338f29 | |
Holger Hans Peter Freyther | 8f3955af20 | |
Holger Hans Peter Freyther | e77df798a7 | |
Holger Hans Peter Freyther | 9566715405 | |
Holger Hans Peter Freyther | c2d998c9b4 | |
Holger Hans Peter Freyther | 8946d523ed | |
Holger Hans Peter Freyther | 183d75b24d | |
Holger Hans Peter Freyther | dca725b368 | |
Holger Hans Peter Freyther | f58538edcd | |
Holger Hans Peter Freyther | 9d9e971a87 | |
Holger Hans Peter Freyther | f75ebc3f4b | |
Holger Hans Peter Freyther | 447e9fa77d | |
Scott Rifenbark | adcf8bf7b5 | |
Scott Rifenbark | fda17235fd | |
Holger Hans Peter Freyther | 8083eb0be3 | |
Holger Hans Peter Freyther | 1d9cbe012b | |
Holger Hans Peter Freyther | 0b0ef8aae6 | |
Holger Hans Peter Freyther | 6fe3d67299 | |
Holger Hans Peter Freyther | b49f5121bb | |
Holger Hans Peter Freyther | f52b4a6ba8 | |
Holger Hans Peter Freyther | ba3023edbc | |
Holger Hans Peter Freyther | 60ee4cd504 | |
Scott Rifenbark | c5bdef5617 | |
Scott Rifenbark | 77640e96dd | |
Scott Rifenbark | 98d9b82759 | |
Scott Rifenbark | 1aac5c310f | |
Scott Rifenbark | 1924f52cc8 | |
Scott Rifenbark | 6535ba6077 | |
Scott Rifenbark | eae4945a9d | |
Scott Rifenbark | 5ec43fdbb8 | |
Richard Purdie | 5ed59ae0f2 | |
Scott Rifenbark | e02d553b45 | |
Scott Rifenbark | 720446629b | |
Scott Rifenbark | db9d36f196 | |
Scott Rifenbark | 4cca048ab8 | |
Scott Rifenbark | 51b3d9dd53 | |
Bruce Ashfield | bc885cd8d3 | |
Scott Rifenbark | c657668a07 | |
Scott Rifenbark | 02e3d4dc70 | |
Scott Rifenbark | 57746012d0 | |
Scott Rifenbark | 8a48ec4297 | |
Scott Rifenbark | 61637a5241 | |
Scott Rifenbark | 8a475908b5 | |
Scott Rifenbark | b7d2cf0525 | |
Khem Raj | 4d7fbeda35 | |
Scott Rifenbark | baf536c62c | |
Scott Rifenbark | 0c48a6805e | |
Scott Rifenbark | 1ea2c63bf5 | |
Scott Rifenbark | f33f49a348 | |
Scott Rifenbark | cc6819ede7 | |
Scott Rifenbark | 2a68be025b | |
Scott Rifenbark | f05471dcf8 | |
Scott Rifenbark | 5fe2c53493 | |
Scott Rifenbark | 6b2ae5fd17 | |
Scott Rifenbark | 4eeeded4a7 | |
Scott Rifenbark | 931db10bd0 | |
Scott Rifenbark | 522268be49 | |
Scott Rifenbark | 8e17bffa42 | |
Scott Rifenbark | cc004358f1 | |
Scott Rifenbark | 0e623482d5 | |
Scott Rifenbark | ec31ee62d5 | |
Scott Rifenbark | a59ca8316b | |
Scott Rifenbark | 4423b5b024 | |
Scott Rifenbark | b47f39dbc3 | |
Scott Rifenbark | 9d72b706fa | |
Scott Rifenbark | 5d4888723b | |
Scott Rifenbark | 49e3171850 | |
Scott Rifenbark | 66ddb69916 | |
Scott Rifenbark | de1dcde413 | |
Scott Rifenbark | 9f36b1fe16 | |
Scott Rifenbark | 38c7a8a069 | |
Scott Rifenbark | 23bac7cb0e | |
Scott Rifenbark | 0021456aad | |
Scott Rifenbark | a568995f40 | |
Scott Rifenbark | f82ac840aa | |
Scott Rifenbark | cd2c80dedc | |
Scott Rifenbark | ed93525e65 | |
Scott Rifenbark | 4025831e90 | |
Scott Rifenbark | 94c381f71b | |
Scott Rifenbark | 588e21b339 | |
Richard Purdie | 3429095e86 | |
Richard Purdie | feb11f1079 | |
Richard Purdie | fbec475275 | |
Dongxiao Xu | 317fc4fbd0 | |
Dongxiao Xu | 909dd5b306 | |
Jessica Zhang | 24623d149d | |
Dongxiao Xu | 5687f68f3e | |
Dongxiao Xu | f282b7a027 | |
Dongxiao Xu | 32b1c9150f | |
Dongxiao Xu | 7a541d69dd | |
Joshua Lock | aa1cb68ce2 | |
Saul Wold | dc1f3a3bd0 | |
Saul Wold | 5fbb040355 | |
Bruce Ashfield | 9886c510f9 | |
Paul Eggleton | 49de6096b1 | |
Paul Eggleton | 7eb193fc49 | |
Joshua Lock | 4aa6a8e9a6 | |
Joshua Lock | a1f3aff110 | |
Joshua Lock | bb351c2f41 | |
Joshua Lock | bed552f8d0 | |
Scott Rifenbark | 41c564fe60 | |
Scott Rifenbark | cae817e833 | |
Scott Rifenbark | 7bb8b8f438 | |
Scott Rifenbark | c32652716d | |
Scott Rifenbark | 748fd4543b | |
Scott Rifenbark | 56f7ed979c | |
Scott Rifenbark | 3a15c9f8d0 | |
Scott Rifenbark | fc7ceaead0 | |
Scott Rifenbark | a626a5c208 | |
Scott Rifenbark | cb333ad6f3 | |
Scott Rifenbark | 5b58674c6b | |
Scott Rifenbark | 1017d2aec8 | |
Scott Rifenbark | 9786db045f | |
Scott Rifenbark | 158b84844e | |
Scott Rifenbark | 421c22d32c | |
Scott Rifenbark | 90ccadecc3 | |
Scott Rifenbark | 07638448b0 | |
Scott Rifenbark | f343aa4cc6 | |
Scott Rifenbark | 5cd07954ea | |
Scott Rifenbark | e0338b844f | |
Scott Rifenbark | cde57ddf84 | |
Scott Rifenbark | bee5046908 | |
Scott Rifenbark | 4e6b4c09a5 | |
Scott Rifenbark | 89496194ba | |
Scott Rifenbark | 19f9b25947 | |
Scott Rifenbark | f97e445fc6 | |
Scott Rifenbark | 2766a88a3b | |
Scott Rifenbark | b57c529115 | |
Scott Rifenbark | 319f4ee481 | |
Scott Rifenbark | 2cf26ef150 | |
Scott Rifenbark | 2c1b5b1054 | |
Scott Rifenbark | b8be92c34d | |
Scott Rifenbark | 6b4133b08f | |
Scott Rifenbark | 96d43c2410 | |
Richard Purdie | cde2aa61cf | |
Richard Purdie | 1d18aeafa6 | |
Richard Purdie | b8a67d3000 | |
Richard Purdie | c3c8084855 | |
Richard Purdie | cd0ef4d7c1 | |
Saul Wold | c9e35a126a | |
Richard Purdie | 4456226e45 | |
Saul Wold | bf8f071c5b | |
Elizabeth Flanagan | 3b1e8a214e | |
Darren Hart | 1015dfce8d | |
Richard Purdie | a0b1c14587 | |
Dexuan Cui | 66934fc311 | |
Richard Purdie | 80de0f946b | |
Richard Purdie | 5e65389335 | |
Zhai Edwin | d513e5f92c | |
Saul Wold | e9f8b99215 |
|
.gitignore

@@ -1,27 +1,20 @@
 *.pyc
 *.pyo
-/*.patch
-/build*/
-pyshtables.py
+build*/conf/local.conf
+build*/conf/bblayers.conf
+build*/downloads
+build*/tmp/
+build*/sstate-cache
+build*/pyshtables.py
 pstage/
 scripts/oe-git-proxy-socks
 sources/
-meta-*/
+!meta-*
 !meta-skeleton
-!meta-selftest
-hob-image-*.bb
+!meta-demoapps
 *.swp
 *.orig
 *.rej
 *~
-!meta-poky
-!meta-yocto
-!meta-yocto-bsp
-!meta-yocto-imported
-documentation/user-manual/user-manual.html
-documentation/user-manual/user-manual.pdf
-documentation/user-manual/user-manual.tgz
-pull-*/
-bitbake/lib/toaster/contrib/tts/backlog.txt
-bitbake/lib/toaster/contrib/tts/log/*
-bitbake/lib/toaster/contrib/tts/.cache/*
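To check which of the ignore patterns above catches a given path in a working tree, git ships a helper for exactly this; a quick sketch, assuming a git recent enough to provide check-ignore and a path relative to the repository root:

    $ git check-ignore -v build/conf/local.conf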
.templateconf

@@ -1,2 +0,0 @@
-# Template settings
-TEMPLATECONF=${TEMPLATECONF:-meta-poky/conf}
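For context, the TEMPLATECONF default removed here is what the build-environment setup scripts read to locate the template local.conf and bblayers.conf when a fresh build directory is created. A minimal sketch of selecting the template explicitly instead, assuming a stock poky checkout with oe-init-build-env at the top level:

    $ TEMPLATECONF=meta-poky/conf source oe-init-build-env build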
LICENSE

@@ -1,14 +1,14 @@
-Different components of OpenEmbedded are under different licenses (a mix
-of MIT and GPLv2). Please see:
+Different components of Poky are under different licenses (a mix of
+MIT and GPLv2). Please see:
 
-meta/COPYING.GPLv2 (GPLv2)
+bitbake/COPYING (GPLv2)
 meta/COPYING.MIT (MIT)
-meta-selftest/COPYING.MIT (MIT)
-meta-skeleton/COPYING.MIT (MIT)
+meta-extras/COPYING.MIT (MIT)
 
-All metadata is MIT licensed unless otherwise stated. Source code
-included in tree for individual recipes is under the LICENSE stated in
-the associated recipe (.bb file) unless otherwise stated.
+which cover the components in those subdirectories. This means all
+metadata is MIT licensed unless otherwise stated. Source code included
+in tree for individual recipes is under the LICENSE stated in the .bb
+file for those software projects unless otherwise stated.
 
 License information for any other files is either explicitly stated
 or defaults to GPL version 2.
README

@@ -18,41 +18,8 @@ e.g. for the hardware support. Poky is in turn a component of the Yocto Project.
 
 The Yocto Project has extensive documentation about the system including a
 reference manual which can be found at:
-http://yoctoproject.org/documentation
+http://yoctoproject.org/community/documentation
 
-OpenEmbedded-Core is a layer containing the core metadata for current versions
-of OpenEmbedded. It is distro-less (can build a functional image with
-DISTRO = "nodistro") and contains only emulated machine support.
-
-For information about OpenEmbedded, see the OpenEmbedded website:
+For information about OpenEmbedded see their website:
 http://www.openembedded.org/
 
-Where to Send Patches
-=====================
-
-As Poky is an integration repository (built using a tool called combo-layer),
-patches against the various components should be sent to their respective
-upstreams:
-
-bitbake:
-Git repository: http://git.openembedded.org/bitbake/
-Mailing list: bitbake-devel@lists.openembedded.org
-
-documentation:
-Git repository: http://git.yoctoproject.org/cgit/cgit.cgi/yocto-docs/
-Mailing list: yocto@yoctoproject.org
-
-meta-poky, meta-yocto-bsp:
-Git repository: http://git.yoctoproject.org/cgit/cgit.cgi/meta-yocto(-bsp)
-Mailing list: poky@yoctoproject.org
-
-Everything else should be sent to the OpenEmbedded Core mailing list. If in
-doubt, check the oe-core git repository for the content you intend to modify.
-Before sending, be sure the patches apply cleanly to the current oe-core git
-repository.
-
-Git repository: http://git.openembedded.org/openembedded-core/
-Mailing list: openembedded-core@lists.openembedded.org
-
-Note: The scripts directory should be treated with extra care as it is a mix of
-oe-core and poky-specific files.
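The removed "Where to Send Patches" text comes down to mailing patches to the list that owns the touched component. As one hedged example of the usual workflow for an oe-core change (the list address is the one quoted above; the exact subject prefix is only a convention):

    $ git format-patch -1
    $ git send-email --to openembedded-core@lists.openembedded.org 0001-*.patch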
README.hardware

@@ -1,34 +1,28 @@
 Poky Hardware README
 ====================
 
-This file gives details about using Poky with the reference machines
-supported out of the box. A full list of supported reference target machines
-can be found by looking in the following directories:
-
-meta/conf/machine/
-meta-yocto-bsp/conf/machine/
-
-If you are in doubt about using Poky/OpenEmbedded with your hardware, consult
-the documentation for your board/device.
+This file gives details about using Poky with different hardware reference
+boards and consumer devices. A full list of target machines can be found by
+looking in the meta/conf/machine/ directory. If in doubt about using Poky with
+your hardware, consult the documentation for your board/device.
 
 Support for additional devices is normally added by creating BSP layers - for
 more information please see the Yocto Board Support Package (BSP) Developer's
 Guide - documentation source is in documentation/bspguide or download the PDF
 from:
 
-http://yoctoproject.org/documentation
+http://yoctoproject.org/community/documentation
 
-Support for physical reference hardware has now been split out into a
-meta-yocto-bsp layer which can be removed separately from other layers if not
-needed.
+Support for machines other than QEMU may be moved out to separate BSP layers in
+future versions.
 
 
 QEMU Emulation Targets
 ======================
 
-To simplify development, the build system supports building images to
-work with the QEMU emulator in system emulation mode. Several architectures
-are currently supported:
+To simplify development Poky supports building images to work with the QEMU
+emulator in system emulation mode. Several architectures are currently
+supported:
 
 * ARM (qemuarm)
 * x86 (qemux86)
@@ -36,40 +30,32 @@ are currently supported:
 * PowerPC (qemuppc)
 * MIPS (qemumips)
 
-Use of the QEMU images is covered in the Yocto Project Reference Manual.
-The appropriate MACHINE variable value corresponding to the target is given
-in brackets.
+Use of the QEMU images is covered in the Poky Reference Manual. The Poky
+MACHINE setting corresponding to the target is given in brackets.
 
 
 Hardware Reference Boards
 =========================
 
-The following boards are supported by the meta-yocto-bsp layer:
+The following boards are supported by Poky's core layer:
 
-* Texas Instruments Beaglebone (beaglebone)
+* Texas Instruments Beagleboard (beagleboard)
 * Freescale MPC8315E-RDB (mpc8315e-rdb)
+* Ubiquiti Networks RouterStation Pro (routerstationpro)
 
-For more information see the board's section below. The appropriate MACHINE
-variable value corresponding to the board is given in brackets.
+For more information see the board's section below. The Poky MACHINE setting
+corresponding to the board is given in brackets.
 
-Reference Board Maintenance
-===========================
-
-Send pull requests, patches, comments or questions about meta-yocto-bsps to poky@yoctoproject.org
-
-Maintainers: Kevin Hao <kexin.hao@windriver.com>
-Bruce Ashfield <bruce.ashfield@windriver.com>
-
 Consumer Devices
 ================
 
-The following consumer devices are supported by the meta-yocto-bsp layer:
+The following consumer devices are supported by Poky's core layer:
 
-* Intel x86 based PCs and devices (genericx86)
-* Ubiquiti Networks EdgeRouter Lite (edgerouter)
+* Intel Atom based PCs and devices (atom-pc)
 
-For more information see the device's section below. The appropriate MACHINE
-variable value corresponding to the device is given in brackets.
+For more information see the device's section below. The Poky MACHINE setting
+corresponding to the device is given in brackets.
 
 
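Both versions of this text assume the MACHINE value given in brackets is set before building. A small sketch for one of the QEMU targets listed earlier, assuming an already-initialised build directory and the runqemu helper shipped in scripts/:

    (in build/conf/local.conf)
    MACHINE = "qemuarm"

    $ bitbake core-image-minimal
    $ runqemu qemuarm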
@@ -77,29 +63,22 @@ variable value corresponding to the device is given in brackets.
 ===============================
 
 
-Intel x86 based PCs and devices (genericx86*)
-=============================================
+Intel Atom based PCs and devices (atom-pc)
+==========================================
 
-The genericx86 and genericx86-64 MACHINE are tested on the following platforms:
+The atom-pc MACHINE is tested on the following platforms:
 
-Intel Xeon/Core i-Series:
-+ Intel NUC5 Series - ix-52xx Series SOC (Broadwell)
-+ Intel NUC6 Series - ix-62xx Series SOC (Skylake)
-+ Intel Shumway Xeon Server
-
-Intel Atom platforms:
-+ MinnowBoard MAX - E3825 SOC (Bay Trail)
-+ MinnowBoard MAX - Turbot (ADI Engineering) - E3826 SOC (Bay Trail)
-- These boards can be either 32bot or 64bit modes depending on firmware
-- See minnowboard.org for details
-+ Intel Braswell SOC
-
-and is likely to work on many unlisted Atom/Core/Xeon based devices. The MACHINE
-type supports ethernet, wifi, sound, and Intel/vesa graphics by default in
-addition to common PC input devices, busses, and so on.
+o Asus EeePC 901
+o Acer Aspire One
+o Toshiba NB305
+o Intel Embedded Development Board 1-N450 (Black Sand)
+
+and is likely to work on many unlisted Atom based devices. The MACHINE type
+supports ethernet, wifi, sound, and i915 graphics by default in addition to
+common PC input devices, busses, and so on.
 
 Depending on the device, it can boot from a traditional hard-disk, a USB device,
-or over the network. Writing generated images to physical media is
+or over the network. Writing poky generated images to physical media is
 straightforward with a caveat for USB devices. The following examples assume the
 target boot device is /dev/sdb, be sure to verify this and use the correct
 device as the following commands are run as root and are not reversable.
@@ -107,7 +86,7 @@ device as the following commands are run as root and are not reversable.
 USB Device:
 1. Build a live image. This image type consists of a simple filesystem
 without a partition table, which is suitable for USB keys, and with the
-default setup for the genericx86 machine, this image type is built
+default setup for the atom-pc machine, this image type is built
 automatically for any image you build. For example:
 
 $ bitbake core-image-minimal
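Whichever machine name applies, the .hddimg produced by this step ends up in the build's deploy area; assuming the default output layout mentioned later in this file (build/tmp/deploy/images), it can be located with:

    $ ls tmp/deploy/images/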
@@ -115,11 +94,10 @@ USB Device:
 2. Use the "dd" utility to write the image to the raw block device. For
 example:
 
-# dd if=core-image-minimal-genericx86.hddimg of=/dev/sdb
+# dd if=core-image-minimal-atom-pc.hddimg of=/dev/sdb
 
-If the device fails to boot with "Boot error" displayed, or apparently
-stops just after the SYSLINUX version banner, it is likely the BIOS cannot
-understand the physical layout of the disk (or rather it expects a
+If the device fails to boot with "Boot error" displayed, it is likely the BIOS
+cannot understand the physical layout of the disk (or rather it expects a
 particular layout and cannot handle anything else). There are two possible
 solutions to this problem:
 
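Both dd examples assume /dev/sdb really is the USB key, and the text warns the command is not reversible. One way to double-check the device name before writing, assuming util-linux's lsblk is available on the workstation:

    $ lsblk -o NAME,SIZE,MODEL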
@@ -127,50 +105,123 @@ USB Device:
 device, but the idea is to force BIOS to read the Cylinder/Head/Sector
 geometry from the device.
 
-2. Use a ".wic" image with an EFI partition
-
-a) With a default grub-efi bootloader:
-# dd if=core-image-minimal-genericx86-64.wic of=/dev/sdb
-
-b) Use systemd-boot instead
-- Build an image with EFI_PROVIDER="systemd-boot" then use the above
-dd command to write the image to a USB stick.
-
-
-Texas Instruments Beaglebone (beaglebone)
-=========================================
-
-The Beaglebone is an ARM Cortex-A8 development board with USB, Ethernet, 2D/3D
-accelerated graphics, audio, serial, JTAG, and SD/MMC. The Black adds a faster
-CPU, more RAM, eMMC flash and a micro HDMI port. The beaglebone MACHINE is
-tested on the following platforms:
-
-o Beaglebone Black A6
-o Beaglebone A6 (the original "White" model)
-
-The Beaglebone Black has eMMC, while the White does not. Pressing the USER/BOOT
-button when powering on will temporarily change the boot order. But for the sake
-of simplicity, these instructions assume you have erased the eMMC on the Black,
-so its boot behavior matches that of the White and boots off of SD card. To do
-this, issue the following commands from the u-boot prompt:
-
-# mmc dev 1
-# mmc erase 0 512
+2. Without such an option, the BIOS generally boots the device in USB-ZIP
+mode.
+
+a. Configure the USB device for USB-ZIP mode:
+
+# mkdiskimage -4 /dev/sdb 0 63 62
+
+Where 63 and 62 are the head and sector count as reported by fdisk.
+Remove and reinsert the device to allow the kernel to detect the new
+partition layout.
+
+b. Copy the contents of the poky image to the USB-ZIP mode device:
+
+# mount -o loop core-image-minimal-atom-pc.hddimg /tmp/image
+# mount /dev/sdb4 /tmp/usbkey
+# cp -rf /tmp/image/* /tmp/usbkey
+
+c. Install the syslinux boot loader:
+
+# syslinux /dev/sdb4
+
+Install the boot device in the target board and configure the BIOS to boot
+from it.
+
+For more details on the USB-ZIP scenario, see the syslinux documentation:
+http://git.kernel.org/?p=boot/syslinux/syslinux.git;a=blob_plain;f=doc/usbkey.txt;hb=HEAD
+
+
+Texas Instruments Beagleboard (beagleboard)
+===========================================
+
+The Beagleboard is an ARM Cortex-A8 development board with USB, DVI-D, S-Video,
+2D/3D accelerated graphics, audio, serial, JTAG, and SD/MMC. The xM adds a
+faster CPU, more RAM, an ethernet port, more USB ports, microSD, and removes
+the NAND flash. The beagleboard MACHINE is tested on the following platforms:
+
+o Beagleboard C4
+o Beagleboard xM rev A & B
+
+The Beagleboard C4 has NAND, while the xM does not. For the sake of simplicity,
+these instructions assume you have erased the NAND on the C4 so its boot
+behavior matches that of the xM. To do this, issue the following commands from
+the u-boot prompt (note that the unlock may be unecessary depending on the
+version of u-boot installed on your board and only one of the erase commands
+will succeed):
+
+# nand unlock
+# nand erase
+# nand erase.chip
 
 To further tailor these instructions for your board, please refer to the
-documentation at http://www.beagleboard.org/bone and http://www.beagleboard.org/black
+documentation at http://www.beagleboard.org.
 
-From a Linux system with access to the image files perform the following steps:
-
-1. Build an image. For example:
-
-$ bitbake core-image-minimal
-
-2. Use the "dd" utility to write the image to the SD card. For example:
-
-# dd core-image-minimal-beaglebone.wic of=/dev/sdb
-
-3. Insert the SD card into the Beaglebone and boot the board.
+From a Linux system with access to the image files perform the following steps
+as root, replacing mmcblk0* with the SD card device on your machine (such as sdc
+if used via a usb card reader):
+
+1. Partition and format an SD card:
+# fdisk -lu /dev/mmcblk0
+
+Disk /dev/mmcblk0: 3951 MB, 3951034368 bytes
+255 heads, 63 sectors/track, 480 cylinders, total 7716864 sectors
+Units = sectors of 1 * 512 = 512 bytes
+
+Device Boot Start End Blocks Id System
+/dev/mmcblk0p1 * 63 144584 72261 c Win95 FAT32 (LBA)
+/dev/mmcblk0p2 144585 465884 160650 83 Linux
+
+# mkfs.vfat -F 16 -n "boot" /dev/mmcblk0p1
+# mke2fs -j -L "root" /dev/mmcblk0p2
+
+The following assumes the SD card partition 1 and 2 are mounted at
+/media/boot and /media/root respectively. Removing the card and reinserting
+it will do just that on most modern Linux desktop environments.
+
+The files referenced below are made available after the build in
+build/tmp/deploy/images.
+
+2. Install the boot loaders
+# cp MLO-beagleboard /media/boot/MLO
+# cp u-boot-beagleboard.bin /media/boot/u-boot.bin
+
+3. Install the root filesystem
+# tar x -C /media/root -f core-image-$IMAGE_TYPE-beagleboard.tar.bz2
+# tar x -C /media/root -f modules-$KERNEL_VERSION-beagleboard.tgz
+
+4. Install the kernel uImage
+# cp uImage-beagleboard.bin /media/boot/uImage
+
+5. Prepare a u-boot script to simplify the boot process
+The Beagleboard can be made to boot at this point from the u-boot command
+shell. To automate this process, generate a user.scr script as follows.
+
+Install uboot-mkimage (from uboot-mkimage on Ubuntu or uboot-tools on Fedora).
+
+Prepare a script config:
+
+# (cat << EOF
+setenv bootcmd 'mmc init; fatload mmc 0:1 0x80300000 uImage; bootm 0x80300000'
+setenv bootargs 'console=tty0 console=ttyO2,115200n8 root=/dev/mmcblk0p2 rootwait rootfstype=ext3 ro'
+boot
+EOF
+) > serial-boot.cmd
+# mkimage -A arm -O linux -T script -C none -a 0 -e 0 -n "Core Minimal" -d ./serial-boot.cmd ./boot.scr
+# cp boot.scr /media/boot
+
+6. Unmount the SD partitions, insert the SD card into the Beagleboard, and
+boot the Beagleboard
+
+Note: As of the 2.6.37 linux-yocto kernel recipe, the Beagleboard uses the
+OMAP_SERIAL device (ttyO2). If you are using an older kernel, such as the
+2.6.34 linux-yocto-stable, be sure to replace ttyO2 with ttyS2 above. You
+should also override the machine SERIAL_CONSOLE in your local.conf in
+order to setup the getty on the serial line:
+
+SERIAL_CONSOLE_beagleboard = "115200 ttyS2"
 
 
 Freescale MPC8315E-RDB (mpc8315e-rdb)
 =====================================
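Before pulling the card at step 6 of the new instructions, it helps to make sure both partitions are flushed and unmounted; a minimal sketch using the mount points assumed above:

    # sync
    # umount /media/boot /media/root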
@@ -190,8 +241,8 @@ Setup instructions
 You will need the following:
 * NFS root setup on your workstation
 * TFTP server installed on your workstation
-* Straight-thru 9-conductor serial cable (DB9, M/F) connected from your
-PC to UART1
+* Null modem cable connected from your workstation to the first serial port
+on the board
 * Ethernet connected to the first ethernet port on the board
 
 --- Preparation ---
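The NFS-root prerequisite listed in both versions usually means exporting the unpacked target rootfs from the workstation. A hedged example /etc/exports entry (the path and subnet are placeholders, not values from this file):

    /srv/nfs/mpc8315e-rdb 192.168.1.0/24(rw,no_root_squash,no_subtree_check)

    $ sudo exportfs -ra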
@@ -203,82 +254,13 @@ linux/arch/powerpc/boot/dts/mpc8315erdb.dts within the kernel source). If
 you have left them at the factory default then you shouldn't need to do
 anything here.
 
-Note: To boot from USB disk you need u-boot that supports 'ext2load usb'
-command. You need to setup TFTP server, load u-boot from there and
-flash it to NOR flash.
-
-Beware! Flashing bootloader is potentially dangerous operation that can
-brick your device if done incorrectly. Please, make sure you understand
-what below commands mean before executing them.
-
-Load the new u-boot.bin from TFTP server to memory address 200000
-=> tftp 200000 u-boot.bin
-
-Disable flash protection
-=> protect off all
-
-Erase the old u-boot from fe000000 to fe06ffff in NOR flash.
-The size is 0x70000 (458752 bytes)
-=> erase fe000000 fe06ffff
-
-Copy the new u-boot from address 200000 to fe000000
-the size is 0x70000. It has to be greater or equal to u-boot.bin size
-=> cp.b 200000 fe000000 70000
-
-Enable flash protection again
-=> protect on all
-
-Reset the board
-=> reset
-
---- Booting from USB disk ---
-
-1. Flash partitioned image to the USB disk
-
-# dd if=core-image-minimal-mpc8315e-rdb.wic of=/dev/sdb
-
-2. Plug USB disk into the MPC8315 board
-
-3. Connect the board's first serial port to your workstation and then start up
-your favourite serial terminal so that you will be able to interact with
-the serial console. If you don't have a favourite, picocom is suggested:
-
-$ picocom /dev/ttyUSB0 -b 115200
-
-4. Power up or reset the board and press a key on the terminal when prompted
-to get to the U-Boot command line
-
-5. Optional. Load the u-boot.bin from the USB disk:
-
-=> usb start
-=> ext2load usb 0:1 200000 u-boot.bin
-
-and flash it to NOR flash as described above.
-
-6. Set fdtaddr and loadaddr. This is not necessary if you set them before.
-
-=> setenv fdtaddr a00000
-=> setenv loadaddr 1000000
-
-7. Load the kernel and dtb from first partition of the USB disk:
-
-=> usb start
-=> ext2load usb 0:1 $loadaddr uImage
-=> ext2load usb 0:1 $fdtaddr dtb
-
-8. Set bootargs and boot up the device
-
-=> setenv bootargs root=/dev/sdb2 rw rootwait console=ttyS0,115200
-=> bootm $loadaddr - $fdtaddr
-
-
 --- Booting from NFS root ---
 
 Load the kernel and dtb (device tree blob), and boot the system as follows:
 
 1. Get the kernel (uImage-mpc8315e-rdb.bin) and dtb (uImage-mpc8315e-rdb.dtb)
-files from the tmp/deploy directory, and make them available on your TFTP
-server.
+files from the Poky build tmp/deploy directory, and make them available on
+your TFTP server.
 
 2. Connect the board's first serial port to your workstation and then start up
 your favourite serial terminal so that you will be able to interact with
@ -297,133 +279,169 @@ Load the kernel and dtb (device tree blob), and boot the system as follows:
|
||||||
|
|
||||||
5. Download the kernel and dtb, and boot:
|
5. Download the kernel and dtb, and boot:
|
||||||
|
|
||||||
=> tftp 1000000 uImage-mpc8315e-rdb.bin
|
=> tftp 800000 uImage-mpc8315e-rdb.bin
|
||||||
=> tftp 2000000 uImage-mpc8315e-rdb.dtb
|
=> tftp 780000 uImage-mpc8315e-rdb.dtb
|
||||||
=> bootm 1000000 - 2000000
|
=> bootm 800000 - 780000
|
||||||
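For reference, booting with an NFS root also relies on bootargs set in an
earlier step, along these lines; the server address and exported path are
placeholders to adapt to your setup:

=> setenv bootargs root=/dev/nfs rw nfsroot=<server ip>:<rootfs path> ip=dhcp console=ttyS0,115200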
|
|
||||||
--- Booting from JFFS2 root ---
|
|
||||||
|
|
||||||
1. First boot the board with NFS root.
|
|
||||||
|
|
||||||
2. Erase the MTD partition which will be used as root:
|
|
||||||
|
|
||||||
$ flash_eraseall /dev/mtd3
|
|
||||||
|
|
||||||
3. Copy the JFFS2 image to the MTD partition:
|
|
||||||
|
|
||||||
$ flashcp core-image-minimal-mpc8315e-rdb.jffs2 /dev/mtd3
|
|
||||||
|
|
||||||
4. Then reboot the board and set up the environment in U-Boot:
|
|
||||||
|
|
||||||
=> setenv bootargs root=/dev/mtdblock3 rootfstype=jffs2 console=ttyS0,115200
|
|
||||||
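Setting bootargs selects the JFFS2 partition as root; you still need to load
and start a kernel, for example over TFTP as in the NFS root section above
(the load addresses here are illustrative):

=> tftp 1000000 uImage-mpc8315e-rdb.bin
=> tftp 2000000 uImage-mpc8315e-rdb.dtb
=> bootm 1000000 - 2000000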
|
|
||||||
|
|
||||||
Ubiquiti Networks EdgeRouter Lite (edgerouter)
|
Ubiquiti Networks RouterStation Pro (routerstationpro)
|
||||||
==============================================
|
======================================================
|
||||||
|
|
||||||
The EdgeRouter Lite is part of the EdgeMax series. It is a MIPS64 router
|
The RouterStation Pro is an Atheros AR7161 MIPS-based board. Geared towards
|
||||||
(based on the Cavium Octeon processor) with 512MB of RAM, which uses an
|
networking applications, it has all of the usual features as well as three
|
||||||
internal USB pendrive for storage.
|
type IIIA mini-PCI slots and an on-board 3-port 10/100/1000 Ethernet switch,
|
||||||
|
in addition to the 10/100/1000 Ethernet WAN port which supports
|
||||||
|
Power-over-Ethernet.
|
||||||
|
|
||||||
Setup instructions
|
Setup instructions
|
||||||
------------------
|
------------------
|
||||||
|
|
||||||
You will need the following:
|
You will need the following:
|
||||||
* RJ45 -> serial ("rollover") cable connected from your PC to the CONSOLE
|
* A serial cable - female to female (or female to male + gender changer)
|
||||||
port on the device
|
NOTE: cable must be straight through, *not* a null modem cable.
|
||||||
* Ethernet connected to the first ethernet port on the board
|
* USB flash drive or hard disk that is able to be powered from the
|
||||||
|
board's USB port.
|
||||||
|
* tftp server installed on your workstation
|
||||||
|
|
||||||
If using NFS as part of the setup process, you will also need:
|
NOTE: in the following instructions it is assumed that /dev/sdb corresponds
|
||||||
* NFS root setup on your workstation
|
to the USB disk when it is plugged into your workstation. If this is not the
|
||||||
* TFTP server installed on your workstation (if fetching the kernel from
|
case in your setup then please be careful to substitute the correct device
|
||||||
TFTP, see below).
|
name in all commands where appropriate.
|
||||||
|
|
||||||
--- Preparation ---
|
--- Preparation ---
|
||||||
|
|
||||||
Build an image (e.g. core-image-minimal) using "edgerouter" as the MACHINE.
|
1) Build an image (e.g. core-image-minimal) using "routerstationpro" as the
|
||||||
The following instructions use core-image-minimal as an example; other image
|
MACHINE
|
||||||
targets should behave similarly.
|
|
||||||
|
|
||||||
--- Booting from NFS root / kernel via TFTP ---
|
2) Partition the USB drive so that primary partition 1 is type Linux (83).
|
||||||
|
Minimum size depends on your root image size - core-image-minimal probably
|
||||||
|
only needs 8-16MB, other images will need more.
|
||||||
|
|
||||||
Load the kernel, and boot the system as follows:
|
# fdisk /dev/sdb
|
||||||
|
Command (m for help): p
|
||||||
|
|
||||||
1. Get the kernel (vmlinux) file from the tmp/deploy/images/edgerouter
|
Disk /dev/sdb: 4011 MB, 4011491328 bytes
|
||||||
directory, and make it available on your TFTP server.
|
124 heads, 62 sectors/track, 1019 cylinders, total 7834944 sectors
|
||||||
|
Units = sectors of 1 * 512 = 512 bytes
|
||||||
|
Sector size (logical/physical): 512 bytes / 512 bytes
|
||||||
|
I/O size (minimum/optimal): 512 bytes / 512 bytes
|
||||||
|
Disk identifier: 0x0009e87d
|
||||||
|
|
||||||
2. Connect the board's first serial port to your workstation and then start up
|
Device Boot Start End Blocks Id System
|
||||||
your favourite serial terminal so that you will be able to interact with
|
/dev/sdb1 62 1952751 976345 83 Linux
|
||||||
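If the drive does not already have a suitable partition, one can be created
interactively in fdisk; a typical keystroke sequence (details vary between
fdisk versions) is 'n' (new), 'p' (primary), '1', accept the default start and
end sectors, then 'w' to write the table. The type defaults to Linux (83).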
the serial console. If you don't have a favourite, picocom is suggested:
|
|
||||||
|
|
||||||
$ picocom /dev/ttyS0 -b 115200
|
3) Format partition 1 on the USB as ext3
|
||||||
|
|
||||||
3. Power up or reset the board and press a key on the terminal when prompted
|
# mke2fs -j /dev/sdb1
|
||||||
to get to the U-Boot command line
|
|
||||||
|
|
||||||
4. Set up the environment in U-Boot:
|
4) Mount partition 1 and then extract the contents of
|
||||||
|
tmp/deploy/images/core-image-XXXX.tar.bz2 into it (preserving permissions).
|
||||||
|
|
||||||
=> setenv ipaddr <board ip>
|
# mount /dev/sdb1 /media/sdb1
|
||||||
=> setenv serverip <tftp server ip>
|
# cd /media/sdb1
|
||||||
|
# tar -xvjpf tmp/deploy/images/core-image-XXXX.tar.bz2
|
||||||
|
|
||||||
5. Download the kernel and boot:
|
5) Unmount the USB drive and then plug it into the board's USB port
|
||||||
|
|
||||||
=> tftp $loadaddr vmlinux
|
6) Connect the board's serial port to your workstation and then start up
|
||||||
=> bootoctlinux $loadaddr coremask=0x3 root=/dev/nfs rw nfsroot=<nfsroot ip>:<rootfs path> ip=<board ip>:<server ip>:<gateway ip>:<netmask>:edgerouter:eth0:off mtdparts=phys_mapped_flash:512k(boot0),512k(boot1),64k@3072k(eeprom)
|
your favourite serial terminal so that you will be able to interact with
|
||||||
|
the serial console. If you don't have a favourite, picocom is suggested:
|
||||||
|
|
||||||
--- Booting from USB disk ---
|
$ picocom /dev/ttyUSB0 -b 115200
|
||||||
|
|
||||||
To boot from the USB disk, you either need to remove it from the edgerouter
|
7) Connect the network to eth0 (the one that is NOT the 3 port switch). If
|
||||||
box and populate it from another computer, or use a previously booted NFS
|
you are using power-over-ethernet then the board will power up at this point.
|
||||||
image and populate it from the edgerouter itself.
|
|
||||||
|
|
||||||
Type 1: Use partitioned image
|
8) Start up the board, watch the serial console. Hit Ctrl+C to abort the
|
||||||
-----------------------------
|
autostart if the board is configured that way (it is by default). The
|
||||||
|
bootloader's fconfig command can be used to disable autostart and configure
|
||||||
|
the IP settings if you need to change them (default IP is 192.168.1.20).
|
||||||
|
|
||||||
Steps:
|
9) Make the kernel (tmp/deploy/images/vmlinux-routerstationpro.bin) available
|
||||||
|
on the tftp server.
|
||||||
|
|
||||||
1. Remove the USB disk from the edgerouter and insert it into a computer
|
10) If you are going to write the kernel to flash (optional - see "Booting a
|
||||||
that has access to your build artifacts.
|
kernel directly" below for the alternative), remove the current kernel and
|
||||||
|
rootfs flash partitions. You can list the partitions using the following
|
||||||
|
bootloader command:
|
||||||
|
|
||||||
2. Flash the image.
|
RedBoot> fis list
|
||||||
|
|
||||||
# dd if=core-image-minimal-edgerouter.wic of=/dev/sdb
|
You can delete the existing kernel and rootfs with these commands:
|
||||||
|
|
||||||
3. Insert USB disk into the edgerouter and boot it.
|
RedBoot> fis delete kernel
|
||||||
|
RedBoot> fis delete rootfs
|
||||||
|
|
||||||
Type 2: NFS
|
--- Booting a kernel directly ---
|
||||||
-----------
|
|
||||||
|
|
||||||
Note: If you place the kernel on the ext3 partition, you must re-create the
|
1) Load the kernel using the following bootloader command:
|
||||||
ext3 filesystem, since the factory u-boot can only handle 128 byte inodes and
|
|
||||||
cannot read the partition otherwise.
|
|
||||||
|
|
||||||
These boot instructions assume that you have recreated the ext3 filesystem with
|
RedBoot> load -m tftp -h <ip of tftp server> vmlinux-routerstationpro.bin
|
||||||
128 byte inodes, you have an updated u-boot, or you are running an image capable
|
|
||||||
of making the filesystem on the board itself.
|
|
||||||
|
|
||||||
|
You should see a message confirming that it loaded successfully.
|
||||||
|
|
||||||
1. Boot from NFS root
|
2) Execute the kernel:
|
||||||
|
|
||||||
2. Mount the USB disk partition 2 and then extract the contents of
|
RedBoot> exec -c "console=ttyS0,115200 root=/dev/sda1 rw rootdelay=2 board=UBNT-RSPRO"
|
||||||
tmp/deploy/core-image-XXXX.tar.bz2 into it.
|
|
||||||
|
|
||||||
Before starting, copy core-image-minimal-xxx.tar.bz2 and vmlinux into
|
Note that specifying the command line with -c is important as linux-yocto does
|
||||||
the rootfs path on your workstation.
|
not provide a default command line.
|
||||||
|
|
||||||
and then,
|
--- Writing a kernel to flash ---
|
||||||
|
|
||||||
# mount /dev/sda2 /media/sda2
|
1) Go to your tftp server and gzip the kernel you want in flash. It should
|
||||||
# tar -xvjpf core-image-minimal-XXX.tar.bz2 -C /media/sda2
|
roughly halve the size.
|
||||||
# cp vmlinux /media/sda2/boot/vmlinux
|
|
||||||
# umount /media/sda2
|
|
||||||
# reboot
|
|
||||||
|
|
||||||
3. Reboot the board and press a key on the terminal when prompted to get to the U-Boot
|
2) Load the kernel using the following bootloader command:
|
||||||
command line:
|
|
||||||
|
|
||||||
# reboot
|
RedBoot> load -r -b 0x80600000 -m tftp -h <ip of tftp server> vmlinux-routerstationpro.bin.gz
|
||||||
|
|
||||||
4. Load the kernel and boot:
|
This should output something similar to the following:
|
||||||
|
|
||||||
|
Raw file loaded 0x80600000-0x8087c537, assumed entry at 0x80600000
|
||||||
|
|
||||||
|
Calculate the length by subtracting the first number from the second number
|
||||||
|
and then rounding the result up to the nearest 0x1000.
|
||||||
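Worked example with the numbers above: 0x8087c537 - 0x80600000 = 0x27c537,
which rounded up to the nearest 0x1000 gives 0x27d000, the length to pass to
'fis create -l'.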
|
|
||||||
|
3) Using the length calculated above, create a flash partition for the kernel:
|
||||||
|
|
||||||
|
RedBoot> fis create -b 0x80600000 -l 0x240000 kernel
|
||||||
|
|
||||||
|
(change 0x240000 to your rounded length -- change "kernel" to whatever
|
||||||
|
you want to name your kernel)
|
||||||
|
|
||||||
|
--- Booting a kernel from flash ---
|
||||||
|
|
||||||
|
To boot the flashed kernel perform the following steps.
|
||||||
|
|
||||||
|
1) At the bootloader prompt, load the kernel:
|
||||||
|
|
||||||
|
RedBoot> fis load -d -e kernel
|
||||||
|
|
||||||
|
(Change the name "kernel" above if you chose something different earlier)
|
||||||
|
|
||||||
|
(-e means 'elf', -d 'decompress')
|
||||||
|
|
||||||
|
2) Execute the kernel using the exec command as above.
|
||||||
|
|
||||||
|
--- Automating the boot process ---
|
||||||
|
|
||||||
|
After writing the kernel to flash and testing the load and exec commands
|
||||||
|
manually, you can automate the boot process with a boot script.
|
||||||
|
|
||||||
|
1) RedBoot> fconfig
|
||||||
|
(Answer the questions not specified here as they pertain to your environment)
|
||||||
|
2) Run script at boot: true
|
||||||
|
Boot script:
|
||||||
|
.. fis load -d -e kernel
|
||||||
|
.. exec
|
||||||
|
Enter script, terminate with empty line
|
||||||
|
>> fis load -d -e kernel
|
||||||
|
>> exec -c "console=ttyS0,115200 root=/dev/sda1 rw rootdelay=2 board=UBNT-RSPRO"
|
||||||
|
>>
|
||||||
|
3) Answer the remaining questions and write the changes to flash:
|
||||||
|
Update RedBoot non-volatile configuration - continue (y/n)? y
|
||||||
|
... Erase from 0xbfff0000-0xc0000000: .
|
||||||
|
... Program from 0x87ff0000-0x88000000 at 0xbfff0000: .
|
||||||
|
4) Power cycle the board.
|
||||||
|
|
||||||
=> ext2load usb 0:2 $loadaddr boot/vmlinux
|
|
||||||
=> bootoctlinux $loadaddr coremask=0x3 root=/dev/sda2 rw rootwait mtdparts=phys_mapped_flash:512k(boot0),512k(boot1),64k@3072k(eeprom)
|
|
||||||
|
|
|
@ -1,19 +0,0 @@
|
||||||
BitBake is licensed under the GNU General Public License version 2.0. See COPYING for further details.
|
|
||||||
|
|
||||||
The following external components are distributed with this software:
|
|
||||||
|
|
||||||
* The Toaster Simple UI application is based upon the Django project template, the files of which are covered by the BSD license and are copyright (c) Django Software
|
|
||||||
Foundation and individual contributors.
|
|
||||||
|
|
||||||
* Twitter Bootstrap (including Glyphicons), redistributed under the MIT license
|
|
||||||
* jQuery is redistributed under the MIT license.
|
|
||||||
|
|
||||||
* Twitter typeahead.js redistributed under the MIT license. Note that the JS source has one small modification, so the full unminified file is currently included to make it obvious where this is.
|
|
||||||
|
|
||||||
* jsrender is redistributed under the MIT license.
|
|
||||||
|
|
||||||
* QUnit is redistributed under the MIT license.
|
|
||||||
|
|
||||||
* Font Awesome fonts redistributed under the SIL Open Font License 1.1
|
|
||||||
|
|
||||||
* simplediff is distributed under the zlib license.
|
|
|
@ -1,4 +1,4 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python
|
||||||
# ex:ts=4:sw=4:sts=4:et
|
# ex:ts=4:sw=4:sts=4:et
|
||||||
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||||
#
|
#
|
||||||
|
@ -23,34 +23,222 @@
|
||||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys, logging
|
||||||
|
|
||||||
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
|
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
|
||||||
'lib'))
|
'lib'))
|
||||||
|
|
||||||
|
import optparse
|
||||||
|
import warnings
|
||||||
|
from traceback import format_exception
|
||||||
try:
|
try:
|
||||||
import bb
|
import bb
|
||||||
except RuntimeError as exc:
|
except RuntimeError as exc:
|
||||||
sys.exit(str(exc))
|
sys.exit(str(exc))
|
||||||
|
from bb import event
|
||||||
|
import bb.msg
|
||||||
|
from bb import cooker
|
||||||
|
from bb import ui
|
||||||
|
from bb import server
|
||||||
|
|
||||||
from bb import cookerdata
|
__version__ = "1.13.3"
|
||||||
from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException
|
logger = logging.getLogger("BitBake")
|
||||||
|
|
||||||
if sys.getfilesystemencoding() != "utf-8":
|
|
||||||
sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.")
|
|
||||||
|
|
||||||
__version__ = "1.34.0"
|
class BBConfiguration(object):
|
||||||
|
"""
|
||||||
|
Manages build options and configurations for one run
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, options):
|
||||||
|
for key, val in options.__dict__.items():
|
||||||
|
setattr(self, key, val)
|
||||||
|
self.pkgs_to_build = []
|
||||||
|
|
||||||
|
|
||||||
|
def get_ui(config):
|
||||||
|
if config.ui:
|
||||||
|
interface = config.ui
|
||||||
|
else:
|
||||||
|
interface = 'knotty'
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Dynamically load the UI based on the ui name. Although we
|
||||||
|
# suggest a fixed set this allows you to have flexibility in which
|
||||||
|
# ones are available.
|
||||||
|
module = __import__("bb.ui", fromlist = [interface])
|
||||||
|
return getattr(module, interface).main
|
||||||
|
except AttributeError:
|
||||||
|
sys.exit("FATAL: Invalid user interface '%s' specified.\n"
|
||||||
|
"Valid interfaces: depexp, goggle, ncurses, knotty [default]." % interface)
|
||||||
|
|
||||||
|
|
||||||
|
# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others"""
|
||||||
|
warnlog = logging.getLogger("BitBake.Warnings")
|
||||||
|
_warnings_showwarning = warnings.showwarning
|
||||||
|
def _showwarning(message, category, filename, lineno, file=None, line=None):
|
||||||
|
if file is not None:
|
||||||
|
if _warnings_showwarning is not None:
|
||||||
|
_warnings_showwarning(message, category, filename, lineno, file, line)
|
||||||
|
else:
|
||||||
|
s = warnings.formatwarning(message, category, filename, lineno)
|
||||||
|
warnlog.warn(s)
|
||||||
|
|
||||||
|
warnings.showwarning = _showwarning
|
||||||
|
warnings.filterwarnings("ignore")
|
||||||
|
warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
|
||||||
|
warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
|
||||||
|
warnings.filterwarnings("ignore", category=ImportWarning)
|
||||||
|
warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
|
||||||
|
warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
parser = optparse.OptionParser(
|
||||||
|
version = "BitBake Build Tool Core version %s, %%prog version %s" % (bb.__version__, __version__),
|
||||||
|
usage = """%prog [options] [package ...]
|
||||||
|
|
||||||
|
Executes the specified task (default is 'build') for a given set of BitBake files.
|
||||||
|
It expects that BBFILES is defined, which is a space separated list of files to
|
||||||
|
be executed. BBFILES does support wildcards.
|
||||||
|
Default BBFILES are the .bb files in the current directory.""")
|
||||||
|
|
||||||
|
parser.add_option("-b", "--buildfile", help = "execute the task against this .bb file, rather than a package from BBFILES. Does not handle any dependencies.",
|
||||||
|
action = "store", dest = "buildfile", default = None)
|
||||||
|
|
||||||
|
parser.add_option("-k", "--continue", help = "continue as much as possible after an error. While the target that failed, and those that depend on it, cannot be remade, the other dependencies of these targets can be processed all the same.",
|
||||||
|
action = "store_false", dest = "abort", default = True)
|
||||||
|
|
||||||
|
parser.add_option("-a", "--tryaltconfigs", help = "continue with builds by trying to use alternative providers where possible.",
|
||||||
|
action = "store_true", dest = "tryaltconfigs", default = False)
|
||||||
|
|
||||||
|
parser.add_option("-f", "--force", help = "force run of specified cmd, regardless of stamp status",
|
||||||
|
action = "store_true", dest = "force", default = False)
|
||||||
|
|
||||||
|
parser.add_option("-c", "--cmd", help = "Specify task to execute. Note that this only executes the specified task for the providee and the packages it depends on, i.e. 'compile' does not implicitly call stage for the dependencies (IOW: use only if you know what you are doing). Depending on the base.bbclass a listtasks tasks is defined and will show available tasks",
|
||||||
|
action = "store", dest = "cmd")
|
||||||
|
|
||||||
|
parser.add_option("-r", "--read", help = "read the specified file before bitbake.conf",
|
||||||
|
action = "append", dest = "prefile", default = [])
|
||||||
|
|
||||||
|
parser.add_option("-R", "--postread", help = "read the specified file after bitbake.conf",
|
||||||
|
action = "append", dest = "postfile", default = [])
|
||||||
|
|
||||||
|
parser.add_option("-v", "--verbose", help = "output more chit-chat to the terminal",
|
||||||
|
action = "store_true", dest = "verbose", default = False)
|
||||||
|
|
||||||
|
parser.add_option("-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
|
||||||
|
action = "count", dest="debug", default = 0)
|
||||||
|
|
||||||
|
parser.add_option("-n", "--dry-run", help = "don't execute, just go through the motions",
|
||||||
|
action = "store_true", dest = "dry_run", default = False)
|
||||||
|
|
||||||
|
parser.add_option("-S", "--dump-signatures", help = "don't execute, just dump out the signature construction information",
|
||||||
|
action = "store_true", dest = "dump_signatures", default = False)
|
||||||
|
|
||||||
|
parser.add_option("-p", "--parse-only", help = "quit after parsing the BB files (developers only)",
|
||||||
|
action = "store_true", dest = "parse_only", default = False)
|
||||||
|
|
||||||
|
parser.add_option("-s", "--show-versions", help = "show current and preferred versions of all packages",
|
||||||
|
action = "store_true", dest = "show_versions", default = False)
|
||||||
|
|
||||||
|
parser.add_option("-e", "--environment", help = "show the global or per-package environment (this is what used to be bbread)",
|
||||||
|
action = "store_true", dest = "show_environment", default = False)
|
||||||
|
|
||||||
|
parser.add_option("-g", "--graphviz", help = "emit the dependency trees of the specified packages in the dot syntax",
|
||||||
|
action = "store_true", dest = "dot_graph", default = False)
|
||||||
|
|
||||||
|
parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
|
||||||
|
action = "append", dest = "extra_assume_provided", default = [])
|
||||||
|
|
||||||
|
parser.add_option("-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
|
||||||
|
action = "append", dest = "debug_domains", default = [])
|
||||||
|
|
||||||
|
parser.add_option("-P", "--profile", help = "profile the command and print a report",
|
||||||
|
action = "store_true", dest = "profile", default = False)
|
||||||
|
|
||||||
|
parser.add_option("-u", "--ui", help = "userinterface to use",
|
||||||
|
action = "store", dest = "ui")
|
||||||
|
|
||||||
|
parser.add_option("-t", "--servertype", help = "Choose which server to use, none, process or xmlrpc",
|
||||||
|
action = "store", dest = "servertype")
|
||||||
|
|
||||||
|
parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not",
|
||||||
|
action = "store_true", dest = "revisions_changed", default = False)
|
||||||
|
|
||||||
|
options, args = parser.parse_args(sys.argv)
|
||||||
|
|
||||||
|
configuration = BBConfiguration(options)
|
||||||
|
configuration.pkgs_to_build.extend(args[1:])
|
||||||
|
|
||||||
|
ui_main = get_ui(configuration)
|
||||||
|
|
||||||
|
# Server type can be xmlrpc, process or none currently, if nothing is specified,
|
||||||
|
# the default server is process
|
||||||
|
if configuration.servertype:
|
||||||
|
server_type = configuration.servertype
|
||||||
|
else:
|
||||||
|
server_type = 'process'
|
||||||
|
|
||||||
|
try:
|
||||||
|
module = __import__("bb.server", fromlist = [server_type])
|
||||||
|
server = getattr(module, server_type)
|
||||||
|
except AttributeError:
|
||||||
|
sys.exit("FATAL: Invalid server type '%s' specified.\n"
|
||||||
|
"Valid interfaces: xmlrpc, process [default], none." % servertype)
|
||||||
|
|
||||||
|
# Save a logfile for cooker into the current working directory. When the
|
||||||
|
# server is daemonized this logfile will be truncated.
|
||||||
|
cooker_logfile = os.path.join(os.getcwd(), "cooker.log")
|
||||||
|
|
||||||
|
bb.msg.init_msgconfig(configuration.verbose, configuration.debug,
|
||||||
|
configuration.debug_domains)
|
||||||
|
|
||||||
|
# Ensure logging messages get sent to the UI as events
|
||||||
|
handler = bb.event.LogHandler()
|
||||||
|
logger.addHandler(handler)
|
||||||
|
|
||||||
|
# Before we start modifying the environment we should take a pristine
|
||||||
|
# copy for possible later use
|
||||||
|
initialenv = os.environ.copy()
|
||||||
|
# Clear away any spurious environment variables. But don't wipe the
|
||||||
|
# environment totally. This is necessary to ensure the correct operation
|
||||||
|
# of the UIs (e.g. for DISPLAY, etc.)
|
||||||
|
bb.utils.clean_environment()
|
||||||
|
|
||||||
|
server = server.BitBakeServer()
|
||||||
|
|
||||||
|
server.initServer()
|
||||||
|
idle = server.getServerIdleCB()
|
||||||
|
|
||||||
|
cooker = bb.cooker.BBCooker(configuration, idle, initialenv)
|
||||||
|
cooker.parseCommandLine()
|
||||||
|
|
||||||
|
server.addcooker(cooker)
|
||||||
|
server.saveConnectionDetails()
|
||||||
|
server.detach(cooker_logfile)
|
||||||
|
|
||||||
|
# Should no longer need to ever reference cooker
|
||||||
|
del cooker
|
||||||
|
|
||||||
|
logger.removeHandler(handler)
|
||||||
|
|
||||||
|
# Setup a connection to the server (cooker)
|
||||||
|
server_connection = server.establishConnection()
|
||||||
|
|
||||||
|
try:
|
||||||
|
return server.launchUI(ui_main, server_connection.connection, server_connection.events)
|
||||||
|
finally:
|
||||||
|
bb.event.ui_queue = []
|
||||||
|
server_connection.terminate()
|
||||||
|
|
||||||
|
return 1
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
if __version__ != bb.__version__:
|
|
||||||
sys.exit("Bitbake core version and program version mismatch!")
|
|
||||||
try:
|
try:
|
||||||
sys.exit(bitbake_main(BitBakeConfigParameters(sys.argv),
|
ret = main()
|
||||||
cookerdata.CookerConfiguration()))
|
|
||||||
except BBMainException as err:
|
|
||||||
sys.exit(err)
|
|
||||||
except bb.BBHandledException:
|
|
||||||
sys.exit(1)
|
|
||||||
except Exception:
|
except Exception:
|
||||||
|
ret = 1
|
||||||
import traceback
|
import traceback
|
||||||
traceback.print_exc()
|
traceback.print_exc(5)
|
||||||
sys.exit(1)
|
sys.exit(ret)
|
||||||
|
|
||||||
|
|
|
@ -1,162 +1,12 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python
|
||||||
|
|
||||||
# bitbake-diffsigs
|
|
||||||
# BitBake task signature data comparison utility
|
|
||||||
#
|
|
||||||
# Copyright (C) 2012-2013, 2017 Intel Corporation
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU General Public License version 2 as
|
|
||||||
# published by the Free Software Foundation.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License along
|
|
||||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
|
||||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import warnings
|
import warnings
|
||||||
import fnmatch
|
|
||||||
import argparse
|
|
||||||
import logging
|
|
||||||
import pickle
|
|
||||||
|
|
||||||
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
|
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
|
||||||
|
|
||||||
import bb.tinfoil
|
|
||||||
import bb.siggen
|
import bb.siggen
|
||||||
import bb.msg
|
|
||||||
|
|
||||||
logger = bb.msg.logger_create('bitbake-diffsigs')
|
if len(sys.argv) > 2:
|
||||||
|
bb.siggen.compare_sigfiles(sys.argv[1], sys.argv[2])
|
||||||
def find_compare_task(bbhandler, pn, taskname, sig1=None, sig2=None, color=False):
|
|
||||||
""" Find the most recent signature files for the specified PN/task and compare them """
|
|
||||||
|
|
||||||
if not hasattr(bb.siggen, 'find_siginfo'):
|
|
||||||
logger.error('Metadata does not support finding signature data files')
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
if not taskname.startswith('do_'):
|
|
||||||
taskname = 'do_%s' % taskname
|
|
||||||
|
|
||||||
if sig1 and sig2:
|
|
||||||
sigfiles = bb.siggen.find_siginfo(pn, taskname, [sig1, sig2], bbhandler.config_data)
|
|
||||||
if len(sigfiles) == 0:
|
|
||||||
logger.error('No sigdata files found matching %s %s matching either %s or %s' % (pn, taskname, sig1, sig2))
|
|
||||||
sys.exit(1)
|
|
||||||
elif not sig1 in sigfiles:
|
|
||||||
logger.error('No sigdata files found matching %s %s with signature %s' % (pn, taskname, sig1))
|
|
||||||
sys.exit(1)
|
|
||||||
elif not sig2 in sigfiles:
|
|
||||||
logger.error('No sigdata files found matching %s %s with signature %s' % (pn, taskname, sig2))
|
|
||||||
sys.exit(1)
|
|
||||||
latestfiles = [sigfiles[sig1], sigfiles[sig2]]
|
|
||||||
else:
|
|
||||||
filedates = bb.siggen.find_siginfo(pn, taskname, None, bbhandler.config_data)
|
|
||||||
latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-3:]
|
|
||||||
if not latestfiles:
|
|
||||||
logger.error('No sigdata files found matching %s %s' % (pn, taskname))
|
|
||||||
sys.exit(1)
|
|
||||||
elif len(latestfiles) < 2:
|
|
||||||
logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (pn, taskname))
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
# Define recursion callback
|
|
||||||
def recursecb(key, hash1, hash2):
|
|
||||||
hashes = [hash1, hash2]
|
|
||||||
hashfiles = bb.siggen.find_siginfo(key, None, hashes, bbhandler.config_data)
|
|
||||||
|
|
||||||
recout = []
|
|
||||||
if len(hashfiles) == 0:
|
|
||||||
recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2))
|
|
||||||
elif not hash1 in hashfiles:
|
|
||||||
recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash1))
|
|
||||||
elif not hash2 in hashfiles:
|
|
||||||
recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash2))
|
|
||||||
else:
|
|
||||||
out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb, color=color)
|
|
||||||
for change in out2:
|
|
||||||
for line in change.splitlines():
|
|
||||||
recout.append(' ' + line)
|
|
||||||
|
|
||||||
return recout
|
|
||||||
|
|
||||||
# Recurse into signature comparison
|
|
||||||
logger.debug("Signature file (previous): %s" % latestfiles[-2])
|
|
||||||
logger.debug("Signature file (latest): %s" % latestfiles[-1])
|
|
||||||
output = bb.siggen.compare_sigfiles(latestfiles[-2], latestfiles[-1], recursecb, color=color)
|
|
||||||
if output:
|
|
||||||
print('\n'.join(output))
|
|
||||||
sys.exit(0)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(
|
|
||||||
description="Compares siginfo/sigdata files written out by BitBake")
|
|
||||||
|
|
||||||
parser.add_argument('-d', '--debug',
|
|
||||||
help='Enable debug output',
|
|
||||||
action='store_true')
|
|
||||||
|
|
||||||
parser.add_argument('--color',
|
|
||||||
help='Colorize output (where %(metavar)s is %(choices)s)',
|
|
||||||
choices=['auto', 'always', 'never'], default='auto', metavar='color')
|
|
||||||
|
|
||||||
parser.add_argument("-t", "--task",
|
|
||||||
help="find the signature data files for last two runs of the specified task and compare them",
|
|
||||||
action="store", dest="taskargs", nargs=2, metavar=('recipename', 'taskname'))
|
|
||||||
|
|
||||||
parser.add_argument("-s", "--signature",
|
|
||||||
help="With -t/--task, specify the signatures to look for instead of taking the last two",
|
|
||||||
action="store", dest="sigargs", nargs=2, metavar=('fromsig', 'tosig'))
|
|
||||||
|
|
||||||
parser.add_argument("sigdatafile1",
|
|
||||||
help="First signature file to compare (or signature file to dump, if second not specified). Not used when using -t/--task.",
|
|
||||||
action="store", nargs='?')
|
|
||||||
|
|
||||||
parser.add_argument("sigdatafile2",
|
|
||||||
help="Second signature file to compare",
|
|
||||||
action="store", nargs='?')
|
|
||||||
|
|
||||||
|
|
||||||
options = parser.parse_args()
|
|
||||||
|
|
||||||
if options.debug:
|
|
||||||
logger.setLevel(logging.DEBUG)
|
|
||||||
|
|
||||||
color = (options.color == 'always' or (options.color == 'auto' and sys.stdout.isatty()))
|
|
||||||
|
|
||||||
if options.taskargs:
|
|
||||||
with bb.tinfoil.Tinfoil() as tinfoil:
|
|
||||||
tinfoil.prepare(config_only=True)
|
|
||||||
if options.sigargs:
|
|
||||||
find_compare_task(tinfoil, options.taskargs[0], options.taskargs[1], options.sigargs[0], options.sigargs[1], color=color)
|
|
||||||
else:
|
|
||||||
find_compare_task(tinfoil, options.taskargs[0], options.taskargs[1], color=color)
|
|
||||||
else:
|
else:
|
||||||
if options.sigargs:
|
bb.siggen.dump_sigfile(sys.argv[1])
|
||||||
logger.error('-s/--signature can only be used together with -t/--task')
|
|
||||||
sys.exit(1)
|
|
||||||
try:
|
|
||||||
if options.sigdatafile1 and options.sigdatafile2:
|
|
||||||
output = bb.siggen.compare_sigfiles(options.sigdatafile1, options.sigdatafile2, color=color)
|
|
||||||
elif options.sigdatafile1:
|
|
||||||
output = bb.siggen.dump_sigfile(options.sigdatafile1)
|
|
||||||
else:
|
|
||||||
logger.error('Must specify signature file(s) or -t/--task')
|
|
||||||
parser.print_help()
|
|
||||||
sys.exit(1)
|
|
||||||
except IOError as e:
|
|
||||||
logger.error(str(e))
|
|
||||||
sys.exit(1)
|
|
||||||
except (pickle.UnpicklingError, EOFError):
|
|
||||||
logger.error('Invalid signature data - ensure you are specifying sigdata/siginfo files')
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
if output:
|
|
||||||
print('\n'.join(output))
|
|
||||||
|
|
|
@ -1,94 +0,0 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
# bitbake-dumpsig
|
|
||||||
# BitBake task signature dump utility
|
|
||||||
#
|
|
||||||
# Copyright (C) 2013 Intel Corporation
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU General Public License version 2 as
|
|
||||||
# published by the Free Software Foundation.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License along
|
|
||||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
|
||||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import warnings
|
|
||||||
import optparse
|
|
||||||
import logging
|
|
||||||
import pickle
|
|
||||||
|
|
||||||
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
|
|
||||||
|
|
||||||
import bb.tinfoil
|
|
||||||
import bb.siggen
|
|
||||||
import bb.msg
|
|
||||||
|
|
||||||
logger = bb.msg.logger_create('bitbake-dumpsig')
|
|
||||||
|
|
||||||
def find_siginfo_task(bbhandler, pn, taskname):
|
|
||||||
""" Find the most recent signature file for the specified PN/task """
|
|
||||||
|
|
||||||
if not hasattr(bb.siggen, 'find_siginfo'):
|
|
||||||
logger.error('Metadata does not support finding signature data files')
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
if not taskname.startswith('do_'):
|
|
||||||
taskname = 'do_%s' % taskname
|
|
||||||
|
|
||||||
filedates = bb.siggen.find_siginfo(pn, taskname, None, bbhandler.config_data)
|
|
||||||
latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-1:]
|
|
||||||
if not latestfiles:
|
|
||||||
logger.error('No sigdata files found matching %s %s' % (pn, taskname))
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
return latestfiles[0]
|
|
||||||
|
|
||||||
parser = optparse.OptionParser(
|
|
||||||
description = "Dumps siginfo/sigdata files written out by BitBake",
|
|
||||||
usage = """
|
|
||||||
%prog -t recipename taskname
|
|
||||||
%prog sigdatafile""")
|
|
||||||
|
|
||||||
parser.add_option("-D", "--debug",
|
|
||||||
help = "enable debug",
|
|
||||||
action = "store_true", dest="debug", default = False)
|
|
||||||
|
|
||||||
parser.add_option("-t", "--task",
|
|
||||||
help = "find the signature data file for the specified task",
|
|
||||||
action="store", dest="taskargs", nargs=2, metavar='recipename taskname')
|
|
||||||
|
|
||||||
options, args = parser.parse_args(sys.argv)
|
|
||||||
|
|
||||||
if options.debug:
|
|
||||||
logger.setLevel(logging.DEBUG)
|
|
||||||
|
|
||||||
if options.taskargs:
|
|
||||||
tinfoil = bb.tinfoil.Tinfoil()
|
|
||||||
tinfoil.prepare(config_only = True)
|
|
||||||
file = find_siginfo_task(tinfoil, options.taskargs[0], options.taskargs[1])
|
|
||||||
logger.debug("Signature file: %s" % file)
|
|
||||||
elif len(args) == 1:
|
|
||||||
parser.print_help()
|
|
||||||
sys.exit(0)
|
|
||||||
else:
|
|
||||||
file = args[1]
|
|
||||||
|
|
||||||
try:
|
|
||||||
output = bb.siggen.dump_sigfile(file)
|
|
||||||
except IOError as e:
|
|
||||||
logger.error(str(e))
|
|
||||||
sys.exit(1)
|
|
||||||
except (pickle.UnpicklingError, EOFError):
|
|
||||||
logger.error('Invalid signature data - ensure you are specifying a sigdata/siginfo file')
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
if output:
|
|
||||||
print('\n'.join(output))
|
|
|
@ -1,109 +1,299 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python
|
||||||
|
|
||||||
# This script has subcommands which operate against your bitbake layers, either
|
# This script has subcommands which operate against your bitbake layers, either
|
||||||
# displaying useful information, or acting against them.
|
# displaying useful information, or acting against them.
|
||||||
# See the help output for details on available commands.
|
# See the help output for details on available commands.
|
||||||
|
|
||||||
# Copyright (C) 2011 Mentor Graphics Corporation
|
import cmd
|
||||||
# Copyright (C) 2011-2015 Intel Corporation
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU General Public License version 2 as
|
|
||||||
# published by the Free Software Foundation.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License along
|
|
||||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
|
||||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import argparse
|
|
||||||
import signal
|
|
||||||
|
|
||||||
bindir = os.path.dirname(__file__)
|
bindir = os.path.dirname(__file__)
|
||||||
topdir = os.path.dirname(bindir)
|
topdir = os.path.dirname(bindir)
|
||||||
sys.path[0:0] = [os.path.join(topdir, 'lib')]
|
sys.path[0:0] = [os.path.join(topdir, 'lib')]
|
||||||
|
|
||||||
import bb.tinfoil
|
import bb.cache
|
||||||
import bb.msg
|
import bb.cooker
|
||||||
|
import bb.providers
|
||||||
logger = bb.msg.logger_create('bitbake-layers', sys.stdout)
|
import bb.utils
|
||||||
|
from bb.cooker import state
|
||||||
def main():
|
|
||||||
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
|
|
||||||
parser = argparse.ArgumentParser(
|
|
||||||
description="BitBake layers utility",
|
|
||||||
epilog="Use %(prog)s <subcommand> --help to get help on a specific command",
|
|
||||||
add_help=False)
|
|
||||||
parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
|
|
||||||
parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
|
|
||||||
parser.add_argument('--color', choices=['auto', 'always', 'never'], default='auto', help='Colorize output (where %(metavar)s is %(choices)s)', metavar='COLOR')
|
|
||||||
|
|
||||||
global_args, unparsed_args = parser.parse_known_args()
|
|
||||||
|
|
||||||
# Help is added here rather than via add_help=True, as we don't want it to
|
|
||||||
# be handled by parse_known_args()
|
|
||||||
parser.add_argument('-h', '--help', action='help', default=argparse.SUPPRESS,
|
|
||||||
help='show this help message and exit')
|
|
||||||
subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')
|
|
||||||
subparsers.required = True
|
|
||||||
|
|
||||||
if global_args.debug:
|
|
||||||
logger.setLevel(logging.DEBUG)
|
|
||||||
elif global_args.quiet:
|
|
||||||
logger.setLevel(logging.ERROR)
|
|
||||||
|
|
||||||
# Need to re-run logger_create with color argument
|
|
||||||
# (will be the same logger since it has the same name)
|
|
||||||
bb.msg.logger_create('bitbake-layers', output=sys.stdout, color=global_args.color)
|
|
||||||
|
|
||||||
plugins = []
|
|
||||||
tinfoil = bb.tinfoil.Tinfoil(tracking=True)
|
|
||||||
tinfoil.logger.setLevel(logger.getEffectiveLevel())
|
|
||||||
try:
|
|
||||||
tinfoil.prepare(True)
|
|
||||||
for path in ([topdir] +
|
|
||||||
tinfoil.config_data.getVar('BBPATH').split(':')):
|
|
||||||
pluginpath = os.path.join(path, 'lib', 'bblayers')
|
|
||||||
bb.utils.load_plugins(logger, plugins, pluginpath)
|
|
||||||
|
|
||||||
registered = False
|
|
||||||
for plugin in plugins:
|
|
||||||
if hasattr(plugin, 'register_commands'):
|
|
||||||
registered = True
|
|
||||||
plugin.register_commands(subparsers)
|
|
||||||
if hasattr(plugin, 'tinfoil_init'):
|
|
||||||
plugin.tinfoil_init(tinfoil)
|
|
||||||
|
|
||||||
if not registered:
|
|
||||||
logger.error("No commands registered - missing plugins?")
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
args = parser.parse_args(unparsed_args, namespace=global_args)
|
|
||||||
|
|
||||||
if getattr(args, 'parserecipes', False):
|
|
||||||
tinfoil.config_data.disableTracking()
|
|
||||||
tinfoil.parseRecipes()
|
|
||||||
tinfoil.config_data.enableTracking()
|
|
||||||
|
|
||||||
return args.func(args)
|
|
||||||
finally:
|
|
||||||
tinfoil.shutdown()
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
logger = logging.getLogger('BitBake')
|
||||||
try:
|
|
||||||
ret = main()
|
|
||||||
except bb.BBHandledException:
|
def main(args):
|
||||||
ret = 1
|
# Set up logging
|
||||||
except Exception:
|
console = logging.StreamHandler(sys.stdout)
|
||||||
ret = 1
|
format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
|
||||||
import traceback
|
bb.msg.addDefaultlogFilter(console)
|
||||||
traceback.print_exc()
|
console.setFormatter(format)
|
||||||
sys.exit(ret)
|
logger.addHandler(console)
|
||||||
|
|
||||||
|
initialenv = os.environ.copy()
|
||||||
|
bb.utils.clean_environment()
|
||||||
|
|
||||||
|
cmds = Commands(initialenv)
|
||||||
|
if args:
|
||||||
|
cmds.onecmd(' '.join(args))
|
||||||
|
else:
|
||||||
|
cmds.do_help('')
|
||||||
|
return cmds.returncode
|
||||||
|
|
||||||
|
|
||||||
|
class Commands(cmd.Cmd):
|
||||||
|
def __init__(self, initialenv):
|
||||||
|
cmd.Cmd.__init__(self)
|
||||||
|
self.returncode = 0
|
||||||
|
self.config = Config(parse_only=True)
|
||||||
|
self.cooker = bb.cooker.BBCooker(self.config,
|
||||||
|
self.register_idle_function,
|
||||||
|
initialenv)
|
||||||
|
self.config_data = self.cooker.configuration.data
|
||||||
|
bb.providers.logger.setLevel(logging.ERROR)
|
||||||
|
self.cooker_data = None
|
||||||
|
|
||||||
|
def register_idle_function(self, function, data):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def prepare_cooker(self):
|
||||||
|
sys.stderr.write("Parsing recipes..")
|
||||||
|
logger.setLevel(logging.WARNING)
|
||||||
|
|
||||||
|
try:
|
||||||
|
while self.cooker.state in (state.initial, state.parsing):
|
||||||
|
self.cooker.updateCache()
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
self.cooker.shutdown()
|
||||||
|
self.cooker.updateCache()
|
||||||
|
sys.exit(2)
|
||||||
|
|
||||||
|
logger.setLevel(logging.INFO)
|
||||||
|
sys.stderr.write("done.\n")
|
||||||
|
|
||||||
|
self.cooker_data = self.cooker.status
|
||||||
|
self.cooker_data.appends = self.cooker.appendlist
|
||||||
|
|
||||||
|
def check_prepare_cooker(self):
|
||||||
|
if not self.cooker_data:
|
||||||
|
self.prepare_cooker()
|
||||||
|
|
||||||
|
def default(self, line):
|
||||||
|
"""Handle unrecognised commands"""
|
||||||
|
sys.stderr.write("Unrecognised command or option\n")
|
||||||
|
self.do_help('')
|
||||||
|
|
||||||
|
def do_help(self, topic):
|
||||||
|
"""display general help or help on a specified command"""
|
||||||
|
if topic:
|
||||||
|
sys.stdout.write('%s: ' % topic)
|
||||||
|
cmd.Cmd.do_help(self,topic)
|
||||||
|
else:
|
||||||
|
sys.stdout.write("usage: bitbake-layers <command> [arguments]\n\n")
|
||||||
|
sys.stdout.write("Available commands:\n")
|
||||||
|
procnames = self.get_names()
|
||||||
|
for procname in procnames:
|
||||||
|
if procname[:3] == 'do_':
|
||||||
|
sys.stdout.write(" %s\n" % procname[3:])
|
||||||
|
doc = getattr(self, procname).__doc__
|
||||||
|
if doc:
|
||||||
|
sys.stdout.write(" %s\n" % doc.splitlines()[0])
|
||||||
|
|
||||||
|
def do_show_layers(self, args):
|
||||||
|
"""show current configured layers"""
|
||||||
|
self.check_prepare_cooker()
|
||||||
|
logger.plain('')
|
||||||
|
logger.plain("%s %s %s" % ("layer".ljust(20), "path".ljust(40), "priority"))
|
||||||
|
logger.plain('=' * 74)
|
||||||
|
layerdirs = str(self.config_data.getVar('BBLAYERS', True)).split()
|
||||||
|
for layerdir in layerdirs:
|
||||||
|
layername = '?'
|
||||||
|
layerpri = 0
|
||||||
|
for layer, _, regex, pri in self.cooker.status.bbfile_config_priorities:
|
||||||
|
if regex.match(os.path.join(layerdir, 'test')):
|
||||||
|
layername = layer
|
||||||
|
layerpri = pri
|
||||||
|
break
|
||||||
|
|
||||||
|
logger.plain("%s %s %d" % (layername.ljust(20), layerdir.ljust(40), layerpri))
|
||||||
|
|
||||||
|
def do_show_overlayed(self, args):
|
||||||
|
"""list overlayed recipes (where there is a recipe in another layer that has a higher layer priority)
|
||||||
|
|
||||||
|
usage: show_overlayed
|
||||||
|
|
||||||
|
Highest priority recipes are listed with the recipes they overlay as subitems.
|
||||||
|
"""
|
||||||
|
self.check_prepare_cooker()
|
||||||
|
if self.cooker.overlayed:
|
||||||
|
logger.plain('Overlayed recipes:')
|
||||||
|
for f in self.cooker.overlayed.iterkeys():
|
||||||
|
logger.plain('%s' % f)
|
||||||
|
for of in self.cooker.overlayed[f]:
|
||||||
|
logger.plain(' %s' % of)
|
||||||
|
else:
|
||||||
|
logger.plain('No overlayed recipes found')
|
||||||
|
|
||||||
|
def do_flatten(self, args):
|
||||||
|
"""flattens layer configuration into a separate output directory.
|
||||||
|
|
||||||
|
usage: flatten <outputdir>
|
||||||
|
|
||||||
|
Takes the current layer configuration and builds a "flattened" directory
|
||||||
|
containing the contents of all layers, with any overlayed recipes removed
|
||||||
|
and bbappends appended to the corresponding recipes. Note that some manual
|
||||||
|
cleanup may still be necessary afterwards, in particular:
|
||||||
|
|
||||||
|
* where non-recipe files (such as patches) are overwritten (the flatten
|
||||||
|
command will show a warning for these)
|
||||||
|
* where anything beyond the normal layer setup has been added to
|
||||||
|
layer.conf (only the lowest priority layer's layer.conf is used)
|
||||||
|
* overridden/appended items from bbappends will need to be tidied up
|
||||||
|
"""
|
||||||
|
arglist = args.split()
|
||||||
|
if len(arglist) != 1:
|
||||||
|
logger.error('Please specify an output directory')
|
||||||
|
self.do_help('flatten')
|
||||||
|
return
|
||||||
|
|
||||||
|
if os.path.exists(arglist[0]) and os.listdir(arglist[0]):
|
||||||
|
logger.error('Directory %s exists and is non-empty, please clear it out first' % arglist[0])
|
||||||
|
return
|
||||||
|
|
||||||
|
self.check_prepare_cooker()
|
||||||
|
layers = (self.config_data.getVar('BBLAYERS', True) or "").split()
|
||||||
|
for layer in layers:
|
||||||
|
overlayed = []
|
||||||
|
for f in self.cooker.overlayed.iterkeys():
|
||||||
|
for of in self.cooker.overlayed[f]:
|
||||||
|
if of.startswith(layer):
|
||||||
|
overlayed.append(of)
|
||||||
|
|
||||||
|
logger.plain('Copying files from %s...' % layer )
|
||||||
|
for root, dirs, files in os.walk(layer):
|
||||||
|
for f1 in files:
|
||||||
|
f1full = os.sep.join([root, f1])
|
||||||
|
if f1full in overlayed:
|
||||||
|
logger.plain(' Skipping overlayed file %s' % f1full )
|
||||||
|
else:
|
||||||
|
ext = os.path.splitext(f1)[1]
|
||||||
|
if ext != '.bbappend':
|
||||||
|
fdest = f1full[len(layer):]
|
||||||
|
fdest = os.path.normpath(os.sep.join([arglist[0],fdest]))
|
||||||
|
bb.utils.mkdirhier(os.path.dirname(fdest))
|
||||||
|
if os.path.exists(fdest):
|
||||||
|
if f1 == 'layer.conf' and root.endswith('/conf'):
|
||||||
|
logger.plain(' Skipping layer config file %s' % f1full )
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
logger.warn('Overwriting file %s', fdest)
|
||||||
|
bb.utils.copyfile(f1full, fdest)
|
||||||
|
if ext == '.bb':
|
||||||
|
if f1 in self.cooker_data.appends:
|
||||||
|
appends = self.cooker_data.appends[f1]
|
||||||
|
if appends:
|
||||||
|
logger.plain(' Applying appends to %s' % fdest )
|
||||||
|
for appendname in appends:
|
||||||
|
self.apply_append(appendname, fdest)
|
||||||
|
|
||||||
|
def get_append_layer(self, appendname):
|
||||||
|
for layer, _, regex, _ in self.cooker.status.bbfile_config_priorities:
|
||||||
|
if regex.match(appendname):
|
||||||
|
return layer
|
||||||
|
return "?"
|
||||||
|
|
||||||
|
def apply_append(self, appendname, recipename):
|
||||||
|
appendfile = open(appendname, 'r')
|
||||||
|
recipefile = open(recipename, 'a')
|
||||||
|
recipefile.write('\n')
|
||||||
|
recipefile.write('##### bbappended from %s #####\n' % self.get_append_layer(appendname))
|
||||||
|
recipefile.writelines(appendfile.readlines())
|
||||||
|
|
||||||
|
def do_show_appends(self, args):
|
||||||
|
"""list bbappend files and recipe files they apply to
|
||||||
|
|
||||||
|
usage: show_appends
|
||||||
|
|
||||||
|
Recipes are listed with the bbappends that apply to them as subitems.
|
||||||
|
"""
|
||||||
|
self.check_prepare_cooker()
|
||||||
|
if not self.cooker_data.appends:
|
||||||
|
logger.plain('No append files found')
|
||||||
|
return
|
||||||
|
|
||||||
|
logger.plain('State of append files:')
|
||||||
|
|
||||||
|
pnlist = list(self.cooker_data.pkg_pn.keys())
|
||||||
|
pnlist.sort()
|
||||||
|
for pn in pnlist:
|
||||||
|
self.show_appends_for_pn(pn)
|
||||||
|
|
||||||
|
self.show_appends_for_skipped()
|
||||||
|
|
||||||
|
def show_appends_for_pn(self, pn):
|
||||||
|
filenames = self.cooker_data.pkg_pn[pn]
|
||||||
|
|
||||||
|
best = bb.providers.findBestProvider(pn,
|
||||||
|
self.cooker.configuration.data,
|
||||||
|
self.cooker_data,
|
||||||
|
self.cooker_data.pkg_pn)
|
||||||
|
best_filename = os.path.basename(best[3])
|
||||||
|
|
||||||
|
self.show_appends_output(filenames, best_filename)
|
||||||
|
|
||||||
|
def show_appends_for_skipped(self):
|
||||||
|
filenames = [os.path.basename(f)
|
||||||
|
for f in self.cooker.skiplist.iterkeys()]
|
||||||
|
self.show_appends_output(filenames, None, " (skipped)")
|
||||||
|
|
||||||
|
def show_appends_output(self, filenames, best_filename, name_suffix = ''):
|
||||||
|
appended, missing = self.get_appends_for_files(filenames)
|
||||||
|
if appended:
|
||||||
|
for basename, appends in appended:
|
||||||
|
logger.plain('%s%s:', basename, name_suffix)
|
||||||
|
for append in appends:
|
||||||
|
logger.plain(' %s', append)
|
||||||
|
|
||||||
|
if best_filename:
|
||||||
|
if best_filename in missing:
|
||||||
|
logger.warn('%s: missing append for preferred version',
|
||||||
|
best_filename)
|
||||||
|
self.returncode |= 1
|
||||||
|
|
||||||
|
|
||||||
|
def get_appends_for_files(self, filenames):
|
||||||
|
appended, notappended = [], []
|
||||||
|
for filename in filenames:
|
||||||
|
_, cls = bb.cache.Cache.virtualfn2realfn(filename)
|
||||||
|
if cls:
|
||||||
|
continue
|
||||||
|
|
||||||
|
basename = os.path.basename(filename)
|
||||||
|
appends = self.cooker_data.appends.get(basename)
|
||||||
|
if appends:
|
||||||
|
appended.append((basename, list(appends)))
|
||||||
|
else:
|
||||||
|
notappended.append(basename)
|
||||||
|
return appended, notappended
|
||||||
|
|
||||||
|
|
||||||
|
class Config(object):
|
||||||
|
def __init__(self, **options):
|
||||||
|
self.pkgs_to_build = []
|
||||||
|
self.debug_domains = []
|
||||||
|
self.extra_assume_provided = []
|
||||||
|
self.prefile = []
|
||||||
|
self.postfile = []
|
||||||
|
self.debug = 0
|
||||||
|
self.__dict__.update(options)
|
||||||
|
|
||||||
|
def __getattr__(self, attribute):
|
||||||
|
try:
|
||||||
|
return super(Config, self).__getattribute__(attribute)
|
||||||
|
except AttributeError:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.exit(main(sys.argv[1:]) or 0)
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python
|
||||||
import os
|
import os
|
||||||
import sys,logging
|
import sys,logging
|
||||||
 import optparse
@@ -10,39 +10,37 @@ import prserv.serv

 __version__="1.0.0"

-PRHOST_DEFAULT='0.0.0.0'
+PRHOST_DEFAULT=''
 PRPORT_DEFAULT=8585

 def main():
     parser = optparse.OptionParser(
         version="Bitbake PR Service Core version %s, %%prog version %s" % (prserv.__version__, __version__),
-        usage = "%prog < --start | --stop > [options]")
+        usage = "%prog [options]")

-    parser.add_option("-f", "--file", help="database filename(default: prserv.sqlite3)", action="store",
-                      dest="dbfile", type="string", default="prserv.sqlite3")
+    parser.add_option("-f", "--file", help="database filename(default prserv.db)", action="store",
+                      dest="dbfile", type="string", default="prserv.db")
-    parser.add_option("-l", "--log", help="log filename(default: prserv.log)", action="store",
+    parser.add_option("-l", "--log", help="log filename(default prserv.log)", action="store",
                       dest="logfile", type="string", default="prserv.log")
     parser.add_option("--loglevel", help="logging level, i.e. CRITICAL, ERROR, WARNING, INFO, DEBUG",
-                      action = "store", type="string", dest="loglevel", default = "INFO")
+                      action = "store", type="string", dest="loglevel", default = "WARNING")
     parser.add_option("--start", help="start daemon",
-                      action="store_true", dest="start")
+                      action="store_true", dest="start", default="True")
     parser.add_option("--stop", help="stop daemon",
-                      action="store_true", dest="stop")
+                      action="store_false", dest="start")
     parser.add_option("--host", help="ip address to bind", action="store",
                       dest="host", type="string", default=PRHOST_DEFAULT)
-    parser.add_option("--port", help="port number(default: 8585)", action="store",
+    parser.add_option("--port", help="port number(default 8585)", action="store",
                       dest="port", type="int", default=PRPORT_DEFAULT)

     options, args = parser.parse_args(sys.argv)

     prserv.init_logger(os.path.abspath(options.logfile),options.loglevel)

     if options.start:
-        ret=prserv.serv.start_daemon(options.dbfile, options.host, options.port,os.path.abspath(options.logfile))
-    elif options.stop:
-        ret=prserv.serv.stop_daemon(options.host, options.port)
+        prserv.serv.start_daemon(options)
     else:
-        ret=parser.print_help()
-    return ret
+        prserv.serv.stop_daemon()

 if __name__ == "__main__":
     try:
@@ -50,6 +48,6 @@ if __name__ == "__main__":
     except Exception:
         ret = 1
         import traceback
-        traceback.print_exc()
+        traceback.print_exc(5)
     sys.exit(ret)
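For orientation, a hypothetical invocation of the PR service launcher diffed above, using only the options that appear in this hunk; the script's path is not shown in this view (in BitBake it ships as bin/bitbake-prserv), so the name below is an assumption:

    # start the daemon, making the left-hand ("-") defaults explicit
    bitbake-prserv --start --host 0.0.0.0 --port 8585 --file prserv.sqlite3 --loglevel INFO
    # stop it again
    bitbake-prserv --stop --host 0.0.0.0 --port 8585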
@@ -0,0 +1,119 @@
+#!/usr/bin/env python
+
+import os
+import sys
+import warnings
+sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
+from bb import fetch2
+import logging
+
+logger = logging.getLogger("BitBake")
+
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+    bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
+
+class BBConfiguration(object):
+    """
+    Manages build options and configurations for one run
+    """
+
+    def __init__(self, **options):
+        self.data = {}
+        self.file = []
+        self.cmd = None
+        self.dump_signatures = True
+        self.prefile = []
+        self.postfile = []
+        self.parse_only = True
+
+    def __getattr__(self, attribute):
+        try:
+            return super(BBConfiguration, self).__getattribute__(attribute)
+        except AttributeError:
+            return None
+
+_warnings_showwarning = warnings.showwarning
+def _showwarning(message, category, filename, lineno, file=None, line=None):
+    """Display python warning messages using bb.msg"""
+    if file is not None:
+        if _warnings_showwarning is not None:
+            _warnings_showwarning(message, category, filename, lineno, file, line)
+    else:
+        s = warnings.formatwarning(message, category, filename, lineno)
+        s = s.split("\n")[0]
+        bb.msg.warn(None, s)
+
+warnings.showwarning = _showwarning
+warnings.simplefilter("ignore", DeprecationWarning)
+
+import bb.event
+import bb.cooker
+
+buildfile = sys.argv[1]
+taskname = sys.argv[2]
+if len(sys.argv) >= 4:
+    dryrun = sys.argv[3]
+else:
+    dryrun = False
+if len(sys.argv) >= 5:
+    hashfile = sys.argv[4]
+    p = pickle.Unpickler(file(hashfile, "rb"))
+    hashdata = p.load()
+else:
+    hashdata = None
+
+handler = bb.event.LogHandler()
+logger.addHandler(handler)
+
+#An example to make debug log messages show up
+#bb.msg.init_msgconfig(True, 3, [])
+
+console = logging.StreamHandler(sys.stdout)
+format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
+bb.msg.addDefaultlogFilter(console)
+console.setFormatter(format)
+
+def worker_fire(event, d):
+    if isinstance(event, logging.LogRecord):
+        console.handle(event)
+bb.event.worker_fire = worker_fire
+bb.event.worker_pid = os.getpid()
+
+initialenv = os.environ.copy()
+config = BBConfiguration()
+
+def register_idle_function(self, function, data):
+    pass
+
+cooker = bb.cooker.BBCooker(config, register_idle_function, initialenv)
+config_data = cooker.configuration.data
+cooker.status = config_data
+cooker.handleCollections(bb.data.getVar("BBFILE_COLLECTIONS", config_data, 1))
+
+fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
+buildfile = cooker.matchFile(fn)
+fn = bb.cache.Cache.realfn2virtual(buildfile, cls)
+
+cooker.buildSetVars()
+
+# Load data into the cache for fn and parse the loaded cache data
+the_data = bb.cache.Cache.loadDataFull(fn, cooker.get_file_appends(fn), cooker.configuration.data)
+
+if taskname.endswith("_setscene"):
+    the_data.setVarFlag(taskname, "quieterrors", "1")
+
+if hashdata:
+    bb.parse.siggen.set_taskdata(hashdata["hashes"], hashdata["deps"])
+    for h in hashdata["hashes"]:
+        bb.data.setVar("BBHASH_%s" % h, hashdata["hashes"][h], the_data)
+    for h in hashdata["deps"]:
+        bb.data.setVar("BBHASHDEPS_%s" % h, hashdata["deps"][h], the_data)
+
+ret = 0
+if dryrun != "True":
+    ret = bb.build.exec_task(fn, taskname, the_data)
+sys.exit(ret)
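The script added above is a one-shot task runner: it reads the recipe file, the task name, an optional dry-run flag and an optional pickled hash-data file straight from sys.argv. A hypothetical invocation; the script's name and the paths are illustrative, since neither appears in this view:

    # run do_compile for a recipe, not a dry run, with pickled hash data
    ./bitbake-runtask /path/to/recipe.bb do_compile False /tmp/hashdata.dat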
@@ -1,72 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (C) 2012 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import sys, logging
-sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
-
-import unittest
-try:
-    import bb
-except RuntimeError as exc:
-    sys.exit(str(exc))
-
-tests = ["bb.tests.codeparser",
-         "bb.tests.cow",
-         "bb.tests.data",
-         "bb.tests.fetch",
-         "bb.tests.parse",
-         "bb.tests.utils"]
-
-for t in tests:
-    t = '.'.join(t.split('.')[:3])
-    __import__(t)
-
-
-# Set-up logging
-class StdoutStreamHandler(logging.StreamHandler):
-    """Special handler so that unittest is able to capture stdout"""
-    def __init__(self):
-        # Override __init__() because we don't want to set self.stream here
-        logging.Handler.__init__(self)
-
-    @property
-    def stream(self):
-        # We want to dynamically write wherever sys.stdout is pointing to
-        return sys.stdout
-
-
-handler = StdoutStreamHandler()
-bb.logger.addHandler(handler)
-bb.logger.setLevel(logging.DEBUG)
-
-
-ENV_HELP = """\
-Environment variables:
-  BB_SKIP_NETTESTS      set to 'yes' in order to skip tests using network
-                        connection
-  BB_TMPDIR_NOCLEAN     set to 'yes' to preserve test tmp directories
-"""
-
-class main(unittest.main):
-    def _print_help(self, *args, **kwargs):
-        super(main, self)._print_help(*args, **kwargs)
-        print(ENV_HELP)
-
-
-if __name__ == '__main__':
-    main(defaultTest=tests, buffer=True)
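The file removed above is a unittest-based self-test runner built on unittest.main, so the usual unittest test selection applies along with the two environment variables listed in its ENV_HELP text. A hypothetical invocation; the script's path is not shown in this view:

    # run only the data-store tests, skipping tests that need network access
    BB_SKIP_NETTESTS=yes ./bitbake-selftest bb.tests.data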
@ -1,502 +0,0 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import warnings
|
|
||||||
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
|
|
||||||
from bb import fetch2
|
|
||||||
import logging
|
|
||||||
import bb
|
|
||||||
import select
|
|
||||||
import errno
|
|
||||||
import signal
|
|
||||||
import pickle
|
|
||||||
import traceback
|
|
||||||
import queue
|
|
||||||
from multiprocessing import Lock
|
|
||||||
from threading import Thread
|
|
||||||
|
|
||||||
if sys.getfilesystemencoding() != "utf-8":
|
|
||||||
sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.")
|
|
||||||
|
|
||||||
# Users shouldn't be running this code directly
|
|
||||||
if len(sys.argv) != 2 or not sys.argv[1].startswith("decafbad"):
|
|
||||||
print("bitbake-worker is meant for internal execution by bitbake itself, please don't use it standalone.")
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
profiling = False
|
|
||||||
if sys.argv[1].startswith("decafbadbad"):
|
|
||||||
profiling = True
|
|
||||||
try:
|
|
||||||
import cProfile as profile
|
|
||||||
except:
|
|
||||||
import profile
|
|
||||||
|
|
||||||
# Unbuffer stdout to avoid log truncation in the event
|
|
||||||
# of an unorderly exit as well as to provide timely
|
|
||||||
# updates to log files for use with tail
|
|
||||||
try:
|
|
||||||
if sys.stdout.name == '<stdout>':
|
|
||||||
import fcntl
|
|
||||||
fl = fcntl.fcntl(sys.stdout.fileno(), fcntl.F_GETFL)
|
|
||||||
fl |= os.O_SYNC
|
|
||||||
fcntl.fcntl(sys.stdout.fileno(), fcntl.F_SETFL, fl)
|
|
||||||
#sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
|
|
||||||
logger = logging.getLogger("BitBake")
|
|
||||||
|
|
||||||
worker_pipe = sys.stdout.fileno()
|
|
||||||
bb.utils.nonblockingfd(worker_pipe)
|
|
||||||
# Need to guard against multiprocessing being used in child processes
|
|
||||||
# and multiple processes trying to write to the parent at the same time
|
|
||||||
worker_pipe_lock = None
|
|
||||||
|
|
||||||
handler = bb.event.LogHandler()
|
|
||||||
logger.addHandler(handler)
|
|
||||||
|
|
||||||
if 0:
|
|
||||||
# Code to write out a log file of all events passing through the worker
|
|
||||||
logfilename = "/tmp/workerlogfile"
|
|
||||||
format_str = "%(levelname)s: %(message)s"
|
|
||||||
conlogformat = bb.msg.BBLogFormatter(format_str)
|
|
||||||
consolelog = logging.FileHandler(logfilename)
|
|
||||||
bb.msg.addDefaultlogFilter(consolelog)
|
|
||||||
consolelog.setFormatter(conlogformat)
|
|
||||||
logger.addHandler(consolelog)
|
|
||||||
|
|
||||||
worker_queue = queue.Queue()
|
|
||||||
|
|
||||||
def worker_fire(event, d):
|
|
||||||
data = b"<event>" + pickle.dumps(event) + b"</event>"
|
|
||||||
worker_fire_prepickled(data)
|
|
||||||
|
|
||||||
def worker_fire_prepickled(event):
|
|
||||||
global worker_queue
|
|
||||||
|
|
||||||
worker_queue.put(event)
|
|
||||||
|
|
||||||
#
|
|
||||||
# We can end up with write contention with the cooker, it can be trying to send commands
|
|
||||||
# and we can be trying to send event data back. Therefore use a separate thread for writing
|
|
||||||
# back data to cooker.
|
|
||||||
#
|
|
||||||
worker_thread_exit = False
|
|
||||||
|
|
||||||
def worker_flush(worker_queue):
|
|
||||||
worker_queue_int = b""
|
|
||||||
global worker_pipe, worker_thread_exit
|
|
||||||
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
worker_queue_int = worker_queue_int + worker_queue.get(True, 1)
|
|
||||||
except queue.Empty:
|
|
||||||
pass
|
|
||||||
while (worker_queue_int or not worker_queue.empty()):
|
|
||||||
try:
|
|
||||||
(_, ready, _) = select.select([], [worker_pipe], [], 1)
|
|
||||||
if not worker_queue.empty():
|
|
||||||
worker_queue_int = worker_queue_int + worker_queue.get()
|
|
||||||
written = os.write(worker_pipe, worker_queue_int)
|
|
||||||
worker_queue_int = worker_queue_int[written:]
|
|
||||||
except (IOError, OSError) as e:
|
|
||||||
if e.errno != errno.EAGAIN and e.errno != errno.EPIPE:
|
|
||||||
raise
|
|
||||||
if worker_thread_exit and worker_queue.empty() and not worker_queue_int:
|
|
||||||
return
|
|
||||||
|
|
||||||
worker_thread = Thread(target=worker_flush, args=(worker_queue,))
|
|
||||||
worker_thread.start()
|
|
||||||
|
|
||||||
def worker_child_fire(event, d):
|
|
||||||
global worker_pipe
|
|
||||||
global worker_pipe_lock
|
|
||||||
|
|
||||||
data = b"<event>" + pickle.dumps(event) + b"</event>"
|
|
||||||
try:
|
|
||||||
worker_pipe_lock.acquire()
|
|
||||||
worker_pipe.write(data)
|
|
||||||
worker_pipe_lock.release()
|
|
||||||
except IOError:
|
|
||||||
sigterm_handler(None, None)
|
|
||||||
raise
|
|
||||||
|
|
||||||
bb.event.worker_fire = worker_fire
|
|
||||||
|
|
||||||
lf = None
|
|
||||||
#lf = open("/tmp/workercommandlog", "w+")
|
|
||||||
def workerlog_write(msg):
|
|
||||||
if lf:
|
|
||||||
lf.write(msg)
|
|
||||||
lf.flush()
|
|
||||||
|
|
||||||
def sigterm_handler(signum, frame):
|
|
||||||
signal.signal(signal.SIGTERM, signal.SIG_DFL)
|
|
||||||
os.killpg(0, signal.SIGTERM)
|
|
||||||
sys.exit()
|
|
||||||
|
|
||||||
def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, appends, taskdepdata, extraconfigdata, quieterrors=False, dry_run_exec=False):
|
|
||||||
# We need to setup the environment BEFORE the fork, since
|
|
||||||
# a fork() or exec*() activates PSEUDO...
|
|
||||||
|
|
||||||
envbackup = {}
|
|
||||||
fakeenv = {}
|
|
||||||
umask = None
|
|
||||||
|
|
||||||
taskdep = workerdata["taskdeps"][fn]
|
|
||||||
if 'umask' in taskdep and taskname in taskdep['umask']:
|
|
||||||
# umask might come in as a number or text string..
|
|
||||||
try:
|
|
||||||
umask = int(taskdep['umask'][taskname],8)
|
|
||||||
except TypeError:
|
|
||||||
umask = taskdep['umask'][taskname]
|
|
||||||
|
|
||||||
dry_run = cfg.dry_run or dry_run_exec
|
|
||||||
|
|
||||||
# We can't use the fakeroot environment in a dry run as it possibly hasn't been built
|
|
||||||
if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not dry_run:
|
|
||||||
envvars = (workerdata["fakerootenv"][fn] or "").split()
|
|
||||||
for key, value in (var.split('=') for var in envvars):
|
|
||||||
envbackup[key] = os.environ.get(key)
|
|
||||||
os.environ[key] = value
|
|
||||||
fakeenv[key] = value
|
|
||||||
|
|
||||||
fakedirs = (workerdata["fakerootdirs"][fn] or "").split()
|
|
||||||
for p in fakedirs:
|
|
||||||
bb.utils.mkdirhier(p)
|
|
||||||
logger.debug(2, 'Running %s:%s under fakeroot, fakedirs: %s' %
|
|
||||||
(fn, taskname, ', '.join(fakedirs)))
|
|
||||||
else:
|
|
||||||
envvars = (workerdata["fakerootnoenv"][fn] or "").split()
|
|
||||||
for key, value in (var.split('=') for var in envvars):
|
|
||||||
envbackup[key] = os.environ.get(key)
|
|
||||||
os.environ[key] = value
|
|
||||||
fakeenv[key] = value
|
|
||||||
|
|
||||||
sys.stdout.flush()
|
|
||||||
sys.stderr.flush()
|
|
||||||
|
|
||||||
try:
|
|
||||||
pipein, pipeout = os.pipe()
|
|
||||||
pipein = os.fdopen(pipein, 'rb', 4096)
|
|
||||||
pipeout = os.fdopen(pipeout, 'wb', 0)
|
|
||||||
pid = os.fork()
|
|
||||||
except OSError as e:
|
|
||||||
logger.critical("fork failed: %d (%s)" % (e.errno, e.strerror))
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
if pid == 0:
|
|
||||||
def child():
|
|
||||||
global worker_pipe
|
|
||||||
global worker_pipe_lock
|
|
||||||
pipein.close()
|
|
||||||
|
|
||||||
signal.signal(signal.SIGTERM, sigterm_handler)
|
|
||||||
# Let SIGHUP exit as SIGTERM
|
|
||||||
signal.signal(signal.SIGHUP, sigterm_handler)
|
|
||||||
bb.utils.signal_on_parent_exit("SIGTERM")
|
|
||||||
|
|
||||||
# Save out the PID so that the event can include it the
|
|
||||||
# events
|
|
||||||
bb.event.worker_pid = os.getpid()
|
|
||||||
bb.event.worker_fire = worker_child_fire
|
|
||||||
worker_pipe = pipeout
|
|
||||||
worker_pipe_lock = Lock()
|
|
||||||
|
|
||||||
# Make the child the process group leader and ensure no
|
|
||||||
# child process will be controlled by the current terminal
|
|
||||||
# This ensures signals sent to the controlling terminal like Ctrl+C
|
|
||||||
# don't stop the child processes.
|
|
||||||
os.setsid()
|
|
||||||
# No stdin
|
|
||||||
newsi = os.open(os.devnull, os.O_RDWR)
|
|
||||||
os.dup2(newsi, sys.stdin.fileno())
|
|
||||||
|
|
||||||
if umask:
|
|
||||||
os.umask(umask)
|
|
||||||
|
|
||||||
try:
|
|
||||||
bb_cache = bb.cache.NoCache(databuilder)
|
|
||||||
(realfn, virtual, mc) = bb.cache.virtualfn2realfn(fn)
|
|
||||||
the_data = databuilder.mcdata[mc]
|
|
||||||
the_data.setVar("BB_WORKERCONTEXT", "1")
|
|
||||||
the_data.setVar("BB_TASKDEPDATA", taskdepdata)
|
|
||||||
if cfg.limited_deps:
|
|
||||||
the_data.setVar("BB_LIMITEDDEPS", "1")
|
|
||||||
the_data.setVar("BUILDNAME", workerdata["buildname"])
|
|
||||||
the_data.setVar("DATE", workerdata["date"])
|
|
||||||
the_data.setVar("TIME", workerdata["time"])
|
|
||||||
for varname, value in extraconfigdata.items():
|
|
||||||
the_data.setVar(varname, value)
|
|
||||||
|
|
||||||
bb.parse.siggen.set_taskdata(workerdata["sigdata"])
|
|
||||||
ret = 0
|
|
||||||
|
|
||||||
the_data = bb_cache.loadDataFull(fn, appends)
|
|
||||||
the_data.setVar('BB_TASKHASH', workerdata["runq_hash"][task])
|
|
||||||
|
|
||||||
bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN"), taskname.replace("do_", "")))
|
|
||||||
|
|
||||||
# exported_vars() returns a generator which *cannot* be passed to os.environ.update()
|
|
||||||
# successfully. We also need to unset anything from the environment which shouldn't be there
|
|
||||||
exports = bb.data.exported_vars(the_data)
|
|
||||||
|
|
||||||
bb.utils.empty_environment()
|
|
||||||
for e, v in exports:
|
|
||||||
os.environ[e] = v
|
|
||||||
|
|
||||||
for e in fakeenv:
|
|
||||||
os.environ[e] = fakeenv[e]
|
|
||||||
the_data.setVar(e, fakeenv[e])
|
|
||||||
the_data.setVarFlag(e, 'export', "1")
|
|
||||||
|
|
||||||
task_exports = the_data.getVarFlag(taskname, 'exports')
|
|
||||||
if task_exports:
|
|
||||||
for e in task_exports.split():
|
|
||||||
the_data.setVarFlag(e, 'export', '1')
|
|
||||||
v = the_data.getVar(e)
|
|
||||||
if v is not None:
|
|
||||||
os.environ[e] = v
|
|
||||||
|
|
||||||
if quieterrors:
|
|
||||||
the_data.setVarFlag(taskname, "quieterrors", "1")
|
|
||||||
|
|
||||||
except Exception:
|
|
||||||
if not quieterrors:
|
|
||||||
logger.critical(traceback.format_exc())
|
|
||||||
os._exit(1)
|
|
||||||
try:
|
|
||||||
if dry_run:
|
|
||||||
return 0
|
|
||||||
return bb.build.exec_task(fn, taskname, the_data, cfg.profile)
|
|
||||||
except:
|
|
||||||
os._exit(1)
|
|
||||||
if not profiling:
|
|
||||||
os._exit(child())
|
|
||||||
else:
|
|
||||||
profname = "profile-%s.log" % (fn.replace("/", "-") + "-" + taskname)
|
|
||||||
prof = profile.Profile()
|
|
||||||
try:
|
|
||||||
ret = profile.Profile.runcall(prof, child)
|
|
||||||
finally:
|
|
||||||
prof.dump_stats(profname)
|
|
||||||
bb.utils.process_profilelog(profname)
|
|
||||||
os._exit(ret)
|
|
||||||
else:
|
|
||||||
for key, value in iter(envbackup.items()):
|
|
||||||
if value is None:
|
|
||||||
del os.environ[key]
|
|
||||||
else:
|
|
||||||
os.environ[key] = value
|
|
||||||
|
|
||||||
return pid, pipein, pipeout
|
|
||||||
|
|
||||||
class runQueueWorkerPipe():
|
|
||||||
"""
|
|
||||||
Abstraction for a pipe between a worker thread and the worker server
|
|
||||||
"""
|
|
||||||
def __init__(self, pipein, pipeout):
|
|
||||||
self.input = pipein
|
|
||||||
if pipeout:
|
|
||||||
pipeout.close()
|
|
||||||
bb.utils.nonblockingfd(self.input)
|
|
||||||
self.queue = b""
|
|
||||||
|
|
||||||
def read(self):
|
|
||||||
start = len(self.queue)
|
|
||||||
try:
|
|
||||||
self.queue = self.queue + (self.input.read(102400) or b"")
|
|
||||||
except (OSError, IOError) as e:
|
|
||||||
if e.errno != errno.EAGAIN:
|
|
||||||
raise
|
|
||||||
|
|
||||||
end = len(self.queue)
|
|
||||||
index = self.queue.find(b"</event>")
|
|
||||||
while index != -1:
|
|
||||||
worker_fire_prepickled(self.queue[:index+8])
|
|
||||||
self.queue = self.queue[index+8:]
|
|
||||||
index = self.queue.find(b"</event>")
|
|
||||||
return (end > start)
|
|
||||||
|
|
||||||
def close(self):
|
|
||||||
while self.read():
|
|
||||||
continue
|
|
||||||
if len(self.queue) > 0:
|
|
||||||
print("Warning, worker child left partial message: %s" % self.queue)
|
|
||||||
self.input.close()
|
|
||||||
|
|
||||||
normalexit = False
|
|
||||||
|
|
||||||
class BitbakeWorker(object):
|
|
||||||
def __init__(self, din):
|
|
||||||
self.input = din
|
|
||||||
bb.utils.nonblockingfd(self.input)
|
|
||||||
self.queue = b""
|
|
||||||
self.cookercfg = None
|
|
||||||
self.databuilder = None
|
|
||||||
self.data = None
|
|
||||||
self.extraconfigdata = None
|
|
||||||
self.build_pids = {}
|
|
||||||
self.build_pipes = {}
|
|
||||||
|
|
||||||
signal.signal(signal.SIGTERM, self.sigterm_exception)
|
|
||||||
# Let SIGHUP exit as SIGTERM
|
|
||||||
signal.signal(signal.SIGHUP, self.sigterm_exception)
|
|
||||||
if "beef" in sys.argv[1]:
|
|
||||||
bb.utils.set_process_name("Worker (Fakeroot)")
|
|
||||||
else:
|
|
||||||
bb.utils.set_process_name("Worker")
|
|
||||||
|
|
||||||
def sigterm_exception(self, signum, stackframe):
|
|
||||||
if signum == signal.SIGTERM:
|
|
||||||
bb.warn("Worker received SIGTERM, shutting down...")
|
|
||||||
elif signum == signal.SIGHUP:
|
|
||||||
bb.warn("Worker received SIGHUP, shutting down...")
|
|
||||||
self.handle_finishnow(None)
|
|
||||||
signal.signal(signal.SIGTERM, signal.SIG_DFL)
|
|
||||||
os.kill(os.getpid(), signal.SIGTERM)
|
|
||||||
|
|
||||||
def serve(self):
|
|
||||||
while True:
|
|
||||||
(ready, _, _) = select.select([self.input] + [i.input for i in self.build_pipes.values()], [] , [], 1)
|
|
||||||
if self.input in ready:
|
|
||||||
try:
|
|
||||||
r = self.input.read()
|
|
||||||
if len(r) == 0:
|
|
||||||
# EOF on pipe, server must have terminated
|
|
||||||
self.sigterm_exception(signal.SIGTERM, None)
|
|
||||||
self.queue = self.queue + r
|
|
||||||
except (OSError, IOError):
|
|
||||||
pass
|
|
||||||
if len(self.queue):
|
|
||||||
self.handle_item(b"cookerconfig", self.handle_cookercfg)
|
|
||||||
self.handle_item(b"extraconfigdata", self.handle_extraconfigdata)
|
|
||||||
self.handle_item(b"workerdata", self.handle_workerdata)
|
|
||||||
self.handle_item(b"runtask", self.handle_runtask)
|
|
||||||
self.handle_item(b"finishnow", self.handle_finishnow)
|
|
||||||
self.handle_item(b"ping", self.handle_ping)
|
|
||||||
self.handle_item(b"quit", self.handle_quit)
|
|
||||||
|
|
||||||
for pipe in self.build_pipes:
|
|
||||||
if self.build_pipes[pipe].input in ready:
|
|
||||||
self.build_pipes[pipe].read()
|
|
||||||
if len(self.build_pids):
|
|
||||||
while self.process_waitpid():
|
|
||||||
continue
|
|
||||||
|
|
||||||
|
|
||||||
def handle_item(self, item, func):
|
|
||||||
if self.queue.startswith(b"<" + item + b">"):
|
|
||||||
index = self.queue.find(b"</" + item + b">")
|
|
||||||
while index != -1:
|
|
||||||
func(self.queue[(len(item) + 2):index])
|
|
||||||
self.queue = self.queue[(index + len(item) + 3):]
|
|
||||||
index = self.queue.find(b"</" + item + b">")
|
|
||||||
|
|
||||||
def handle_cookercfg(self, data):
|
|
||||||
self.cookercfg = pickle.loads(data)
|
|
||||||
self.databuilder = bb.cookerdata.CookerDataBuilder(self.cookercfg, worker=True)
|
|
||||||
self.databuilder.parseBaseConfiguration()
|
|
||||||
self.data = self.databuilder.data
|
|
||||||
|
|
||||||
def handle_extraconfigdata(self, data):
|
|
||||||
self.extraconfigdata = pickle.loads(data)
|
|
||||||
|
|
||||||
def handle_workerdata(self, data):
|
|
||||||
self.workerdata = pickle.loads(data)
|
|
||||||
bb.msg.loggerDefaultDebugLevel = self.workerdata["logdefaultdebug"]
|
|
||||||
bb.msg.loggerDefaultVerbose = self.workerdata["logdefaultverbose"]
|
|
||||||
bb.msg.loggerVerboseLogs = self.workerdata["logdefaultverboselogs"]
|
|
||||||
bb.msg.loggerDefaultDomains = self.workerdata["logdefaultdomain"]
|
|
||||||
for mc in self.databuilder.mcdata:
|
|
||||||
self.databuilder.mcdata[mc].setVar("PRSERV_HOST", self.workerdata["prhost"])
|
|
||||||
|
|
||||||
def handle_ping(self, _):
|
|
||||||
workerlog_write("Handling ping\n")
|
|
||||||
|
|
||||||
logger.warning("Pong from bitbake-worker!")
|
|
||||||
|
|
||||||
def handle_quit(self, data):
|
|
||||||
workerlog_write("Handling quit\n")
|
|
||||||
|
|
||||||
global normalexit
|
|
||||||
normalexit = True
|
|
||||||
sys.exit(0)
|
|
||||||
|
|
||||||
def handle_runtask(self, data):
|
|
||||||
fn, task, taskname, quieterrors, appends, taskdepdata, dry_run_exec = pickle.loads(data)
|
|
||||||
workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname))
|
|
||||||
|
|
||||||
pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, fn, task, taskname, appends, taskdepdata, self.extraconfigdata, quieterrors, dry_run_exec)
|
|
||||||
|
|
||||||
self.build_pids[pid] = task
|
|
||||||
self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout)
|
|
||||||
|
|
||||||
def process_waitpid(self):
|
|
||||||
"""
|
|
||||||
Return none is there are no processes awaiting result collection, otherwise
|
|
||||||
collect the process exit codes and close the information pipe.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
pid, status = os.waitpid(-1, os.WNOHANG)
|
|
||||||
if pid == 0 or os.WIFSTOPPED(status):
|
|
||||||
return False
|
|
||||||
except OSError:
|
|
||||||
return False
|
|
||||||
|
|
||||||
workerlog_write("Exit code of %s for pid %s\n" % (status, pid))
|
|
||||||
|
|
||||||
if os.WIFEXITED(status):
|
|
||||||
status = os.WEXITSTATUS(status)
|
|
||||||
elif os.WIFSIGNALED(status):
|
|
||||||
# Per shell conventions for $?, when a process exits due to
|
|
||||||
# a signal, we return an exit code of 128 + SIGNUM
|
|
||||||
status = 128 + os.WTERMSIG(status)
|
|
||||||
|
|
||||||
task = self.build_pids[pid]
|
|
||||||
del self.build_pids[pid]
|
|
||||||
|
|
||||||
self.build_pipes[pid].close()
|
|
||||||
del self.build_pipes[pid]
|
|
||||||
|
|
||||||
worker_fire_prepickled(b"<exitcode>" + pickle.dumps((task, status)) + b"</exitcode>")
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
def handle_finishnow(self, _):
|
|
||||||
if self.build_pids:
|
|
||||||
logger.info("Sending SIGTERM to remaining %s tasks", len(self.build_pids))
|
|
||||||
for k, v in iter(self.build_pids.items()):
|
|
||||||
try:
|
|
||||||
os.kill(-k, signal.SIGTERM)
|
|
||||||
os.waitpid(-1, 0)
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
for pipe in self.build_pipes:
|
|
||||||
self.build_pipes[pipe].read()
|
|
||||||
|
|
||||||
try:
|
|
||||||
worker = BitbakeWorker(os.fdopen(sys.stdin.fileno(), 'rb'))
|
|
||||||
if not profiling:
|
|
||||||
worker.serve()
|
|
||||||
else:
|
|
||||||
profname = "profile-worker.log"
|
|
||||||
prof = profile.Profile()
|
|
||||||
try:
|
|
||||||
profile.Profile.runcall(prof, worker.serve)
|
|
||||||
finally:
|
|
||||||
prof.dump_stats(profname)
|
|
||||||
bb.utils.process_profilelog(profname)
|
|
||||||
except BaseException as e:
|
|
||||||
if not normalexit:
|
|
||||||
import traceback
|
|
||||||
sys.stderr.write(traceback.format_exc())
|
|
||||||
sys.stderr.write(str(e))
|
|
||||||
|
|
||||||
worker_thread_exit = True
|
|
||||||
worker_thread.join()
|
|
||||||
|
|
||||||
workerlog_write("exitting")
|
|
||||||
sys.exit(0)
|
|
||||||
|
|
|
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
 # ex:ts=4:sw=4:sts=4:et
 # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
 #
@@ -462,7 +462,7 @@ def main():
         state_group = 2

     for key in bb.data.keys(documentation):
-        data = documentation.getVarFlag(key, "doc", False)
+        data = bb.data.getVarFlag(key, "doc", documentation)
         if not data:
             continue

@ -1,281 +0,0 @@
|
||||||
#!/bin/echo ERROR: This script needs to be sourced. Please run as .
|
|
||||||
|
|
||||||
# toaster - shell script to start Toaster
|
|
||||||
|
|
||||||
# Copyright (C) 2013-2015 Intel Corp.
|
|
||||||
|
|
||||||
# This program is free software; you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU General Public License as published by
|
|
||||||
# the Free Software Foundation; either version 2 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program. If not, see http://www.gnu.org/licenses/.
|
|
||||||
|
|
||||||
HELP="
|
|
||||||
Usage: source toaster start|stop [webport=<address:port>] [noweb]
|
|
||||||
Optional arguments:
|
|
||||||
[noweb] Setup the environment for building with toaster but don't start the development server
|
|
||||||
[webport] Set the development server (default: localhost:8000)
|
|
||||||
"
|
|
||||||
|
|
||||||
databaseCheck()
|
|
||||||
{
|
|
||||||
retval=0
|
|
||||||
# you can always add a superuser later via
|
|
||||||
# ../bitbake/lib/toaster/manage.py createsuperuser --username=<ME>
|
|
||||||
$MANAGE migrate --noinput || retval=1
|
|
||||||
|
|
||||||
if [ $retval -eq 1 ]; then
|
|
||||||
echo "Failed migrations, aborting system start" 1>&2
|
|
||||||
return $retval
|
|
||||||
fi
|
|
||||||
# Make sure that checksettings can pick up any value for TEMPLATECONF
|
|
||||||
export TEMPLATECONF
|
|
||||||
$MANAGE checksettings --traceback || retval=1
|
|
||||||
|
|
||||||
if [ $retval -eq 1 ]; then
|
|
||||||
printf "\nError while checking settings; aborting\n"
|
|
||||||
return $retval
|
|
||||||
fi
|
|
||||||
|
|
||||||
return $retval
|
|
||||||
}
|
|
||||||
|
|
||||||
webserverKillAll()
|
|
||||||
{
|
|
||||||
local pidfile
|
|
||||||
for pidfile in ${BUILDDIR}/.toastermain.pid ${BUILDDIR}/.runbuilds.pid; do
|
|
||||||
if [ -f ${pidfile} ]; then
|
|
||||||
pid=`cat ${pidfile}`
|
|
||||||
while kill -0 $pid 2>/dev/null; do
|
|
||||||
kill -SIGTERM -$pid 2>/dev/null
|
|
||||||
sleep 1
|
|
||||||
done
|
|
||||||
rm ${pidfile}
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
}
|
|
||||||
|
|
||||||
webserverStartAll()
|
|
||||||
{
|
|
||||||
# do not start if toastermain points to a valid process
|
|
||||||
if ! cat "${BUILDDIR}/.toastermain.pid" 2>/dev/null | xargs -I{} kill -0 {} ; then
|
|
||||||
retval=1
|
|
||||||
rm "${BUILDDIR}/.toastermain.pid"
|
|
||||||
fi
|
|
||||||
|
|
||||||
retval=0
|
|
||||||
|
|
||||||
# check the database
|
|
||||||
databaseCheck || return 1
|
|
||||||
|
|
||||||
echo "Starting webserver..."
|
|
||||||
|
|
||||||
$MANAGE runserver "$ADDR_PORT" \
|
|
||||||
</dev/null >>${BUILDDIR}/toaster_web.log 2>&1 \
|
|
||||||
& echo $! >${BUILDDIR}/.toastermain.pid
|
|
||||||
|
|
||||||
sleep 1
|
|
||||||
|
|
||||||
if ! cat "${BUILDDIR}/.toastermain.pid" | xargs -I{} kill -0 {} ; then
|
|
||||||
retval=1
|
|
||||||
rm "${BUILDDIR}/.toastermain.pid"
|
|
||||||
else
|
|
||||||
echo "Toaster development webserver started at http://$ADDR_PORT"
|
|
||||||
echo -e "\nYou can now run 'bitbake <target>' on the command line and monitor your build in Toaster.\nYou can also use a Toaster project to configure and run a build.\n"
|
|
||||||
fi
|
|
||||||
|
|
||||||
return $retval
|
|
||||||
}
|
|
||||||
|
|
||||||
INSTOPSYSTEM=0
|
|
||||||
|
|
||||||
# define the stop command
|
|
||||||
stop_system()
|
|
||||||
{
|
|
||||||
# prevent reentry
|
|
||||||
if [ $INSTOPSYSTEM -eq 1 ]; then return; fi
|
|
||||||
INSTOPSYSTEM=1
|
|
||||||
webserverKillAll
|
|
||||||
# unset exported variables
|
|
||||||
unset TOASTER_DIR
|
|
||||||
unset BITBAKE_UI
|
|
||||||
unset BBBASEDIR
|
|
||||||
trap - SIGHUP
|
|
||||||
#trap - SIGCHLD
|
|
||||||
INSTOPSYSTEM=0
|
|
||||||
}
|
|
||||||
|
|
||||||
verify_prereq() {
|
|
||||||
# Verify Django version
|
|
||||||
reqfile=$(python3 -c "import os; print(os.path.realpath('$BBBASEDIR/toaster-requirements.txt'))")
|
|
||||||
exp='s/Django\([><=]\+\)\([^,]\+\),\([><=]\+\)\(.\+\)/'
|
|
||||||
exp=$exp'import sys,django;version=django.get_version().split(".");'
|
|
||||||
exp=$exp'sys.exit(not (version \1 "\2".split(".") and version \3 "\4".split(".")))/p'
|
|
||||||
if ! sed -n "$exp" $reqfile | python3 - ; then
|
|
||||||
req=`grep ^Django $reqfile`
|
|
||||||
echo "This program needs $req"
|
|
||||||
echo "Please install with pip3 install -r $reqfile"
|
|
||||||
return 2
|
|
||||||
fi
|
|
||||||
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
# read command line parameters
|
|
||||||
if [ -n "$BASH_SOURCE" ] ; then
|
|
||||||
TOASTER=${BASH_SOURCE}
|
|
||||||
elif [ -n "$ZSH_NAME" ] ; then
|
|
||||||
TOASTER=${(%):-%x}
|
|
||||||
else
|
|
||||||
TOASTER=$0
|
|
||||||
fi
|
|
||||||
|
|
||||||
export BBBASEDIR=`dirname $TOASTER`/..
|
|
||||||
MANAGE="python3 $BBBASEDIR/lib/toaster/manage.py"
|
|
||||||
OE_ROOT=`dirname $TOASTER`/../..
|
|
||||||
|
|
||||||
# this is the configuraton file we are using for toaster
|
|
||||||
# we are using the same logic that oe-setup-builddir uses
|
|
||||||
# (based on TEMPLATECONF and .templateconf) to determine
|
|
||||||
# which toasterconf.json to use.
|
|
||||||
# note: There are a number of relative path assumptions
|
|
||||||
# in the local layers that currently make using an arbitrary
|
|
||||||
# toasterconf.json difficult.
|
|
||||||
|
|
||||||
. $OE_ROOT/.templateconf
|
|
||||||
if [ -n "$TEMPLATECONF" ]; then
|
|
||||||
if [ ! -d "$TEMPLATECONF" ]; then
|
|
||||||
# Allow TEMPLATECONF=meta-xyz/conf as a shortcut
|
|
||||||
if [ -d "$OE_ROOT/$TEMPLATECONF" ]; then
|
|
||||||
TEMPLATECONF="$OE_ROOT/$TEMPLATECONF"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
unset OE_ROOT
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
WEBSERVER=1
|
|
||||||
ADDR_PORT="localhost:8000"
|
|
||||||
unset CMD
|
|
||||||
for param in $*; do
|
|
||||||
case $param in
|
|
||||||
noweb )
|
|
||||||
WEBSERVER=0
|
|
||||||
;;
|
|
||||||
start )
|
|
||||||
CMD=$param
|
|
||||||
;;
|
|
||||||
stop )
|
|
||||||
CMD=$param
|
|
||||||
;;
|
|
||||||
webport=*)
|
|
||||||
ADDR_PORT="${param#*=}"
|
|
||||||
# Split the addr:port string
|
|
||||||
ADDR=`echo $ADDR_PORT | cut -f 1 -d ':'`
|
|
||||||
PORT=`echo $ADDR_PORT | cut -f 2 -d ':'`
|
|
||||||
# If only a port has been speified then set address to localhost.
|
|
||||||
if [ $ADDR = $PORT ] ; then
|
|
||||||
ADDR_PORT="localhost:$PORT"
|
|
||||||
fi
|
|
||||||
;;
|
|
||||||
--help)
|
|
||||||
echo "$HELP"
|
|
||||||
return 0
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
echo "$HELP"
|
|
||||||
return 1
|
|
||||||
;;
|
|
||||||
|
|
||||||
esac
|
|
||||||
done
|
|
||||||
|
|
||||||
if [ `basename \"$0\"` = `basename \"${TOASTER}\"` ]; then
|
|
||||||
echo "Error: This script needs to be sourced. Please run as . $TOASTER"
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
verify_prereq || return 1
|
|
||||||
|
|
||||||
# We make sure we're running in the current shell and in a good environment
|
|
||||||
if [ -z "$BUILDDIR" ] || ! which bitbake >/dev/null 2>&1 ; then
|
|
||||||
echo "Error: Build environment is not setup or bitbake is not in path." 1>&2
|
|
||||||
return 2
|
|
||||||
fi
|
|
||||||
|
|
||||||
# this defines the dir toaster will use for
|
|
||||||
# 1) clones of layers (in _toaster_clones )
|
|
||||||
# 2) the build dir (in build)
|
|
||||||
# 3) the sqlite db if that is being used.
|
|
||||||
# 4) pid's we need to clean up on exit/shutdown
|
|
||||||
export TOASTER_DIR=`dirname $BUILDDIR`
|
|
||||||
export BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE TOASTER_DIR"
|
|
||||||
|
|
||||||
# Determine the action. If specified by arguments, fine, if not, toggle it
|
|
||||||
if [ "$CMD" = "start" ] ; then
|
|
||||||
if [ -n "$BBSERVER" ]; then
|
|
||||||
echo " Toaster is already running. Exiting..."
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
elif [ "$CMD" = "" ]; then
|
|
||||||
echo "No command specified"
|
|
||||||
echo "$HELP"
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "The system will $CMD."
|
|
||||||
|
|
||||||
# Execute the commands
|
|
||||||
|
|
||||||
case $CMD in
|
|
||||||
start )
|
|
||||||
# check if addr:port is not in use
|
|
||||||
if [ "$CMD" == 'start' ]; then
|
|
||||||
if [ $WEBSERVER -gt 0 ]; then
|
|
||||||
$MANAGE checksocket "$ADDR_PORT" || return 1
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Create configuration file
|
|
||||||
conf=${BUILDDIR}/conf/local.conf
|
|
||||||
line='INHERIT+="toaster buildhistory"'
|
|
||||||
grep -q "$line" $conf || echo $line >> $conf
|
|
||||||
|
|
||||||
if [ $WEBSERVER -eq 0 ] ; then
|
|
||||||
# Do not update the database for "noweb" unless
|
|
||||||
# it does not yet exist
|
|
||||||
if [ ! -f "$TOASTER_DIR/toaster.sqlite" ] ; then
|
|
||||||
if ! databaseCheck; then
|
|
||||||
echo "Failed ${CMD}."
|
|
||||||
return 4
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then
|
|
||||||
echo "Failed ${CMD}."
|
|
||||||
return 4
|
|
||||||
fi
|
|
||||||
export BITBAKE_UI='toasterui'
|
|
||||||
$MANAGE runbuilds \
|
|
||||||
</dev/null >>${BUILDDIR}/toaster_runbuilds.log 2>&1 \
|
|
||||||
& echo $! >${BUILDDIR}/.runbuilds.pid
|
|
||||||
|
|
||||||
# set fail safe stop system on terminal exit
|
|
||||||
trap stop_system SIGHUP
|
|
||||||
echo "Successful ${CMD}."
|
|
||||||
return 0
|
|
||||||
;;
|
|
||||||
stop )
|
|
||||||
stop_system
|
|
||||||
echo "Successful ${CMD}."
|
|
||||||
;;
|
|
||||||
esac
|
|
|
@@ -1,126 +0,0 @@
-#!/usr/bin/env python3
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (C) 2014 Alex Damian
-#
-# This file re-uses code spread throughout other Bitbake source files.
-# As such, all other copyrights belong to their own right holders.
-#
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-"""
-This command takes a filename as a single parameter. The filename is read
-as a build eventlog, and the ToasterUI is used to process events in the file
-and log data in the database
-"""
-
-import os
-import sys
-import json
-import pickle
-import codecs
-
-from collections import namedtuple
-
-# mangle syspath to allow easy import of modules
-from os.path import join, dirname, abspath
-sys.path.insert(0, join(dirname(dirname(abspath(__file__))), 'lib'))
-
-import bb.cooker
-from bb.ui import toasterui
-
-class EventPlayer:
-    """Emulate a connection to a bitbake server."""
-
-    def __init__(self, eventfile, variables):
-        self.eventfile = eventfile
-        self.variables = variables
-        self.eventmask = []
-
-    def waitEvent(self, _timeout):
-        """Read event from the file."""
-        line = self.eventfile.readline().strip()
-        if not line:
-            return
-        try:
-            event_str = json.loads(line)['vars'].encode('utf-8')
-            event = pickle.loads(codecs.decode(event_str, 'base64'))
-            event_name = "%s.%s" % (event.__module__, event.__class__.__name__)
-            if event_name not in self.eventmask:
-                return
-            return event
-        except ValueError as err:
-            print("Failed loading ", line)
-            raise err
-
-    def runCommand(self, command_line):
-        """Emulate running a command on the server."""
-        name = command_line[0]
-
-        if name == "getVariable":
-            var_name = command_line[1]
-            variable = self.variables.get(var_name)
-            if variable:
-                return variable['v'], None
-            return None, "Missing variable %s" % var_name
-
-        elif name == "getAllKeysWithFlags":
-            dump = {}
-            flaglist = command_line[1]
-            for key, val in self.variables.items():
-                try:
-                    if not key.startswith("__"):
-                        dump[key] = {
-                            'v': val['v'],
-                            'history' : val['history'],
-                        }
-                        for flag in flaglist:
-                            dump[key][flag] = val[flag]
-                except Exception as err:
-                    print(err)
-            return (dump, None)
-
-        elif name == 'setEventMask':
-            self.eventmask = command_line[-1]
-            return True, None
-
-        else:
-            raise Exception("Command %s not implemented" % command_line[0])
-
-    def getEventHandle(self):
-        """
-        This method is called by toasterui.
-        The return value is passed to self.runCommand but not used there.
-        """
-        pass
-
-def main(argv):
-    with open(argv[-1]) as eventfile:
-        # load variables from the first line
-        variables = json.loads(eventfile.readline().strip())['allvariables']
-
-        params = namedtuple('ConfigParams', ['observe_only'])(True)
-        player = EventPlayer(eventfile, variables)
-
-        return toasterui.main(player, player, params)
-
-# run toaster ui on our mock bitbake class
-if __name__ == "__main__":
-    if len(sys.argv) != 2:
-        print("Usage: %s <event file>" % os.path.basename(sys.argv[0]))
-        sys.exit(1)
-
-    sys.exit(main(sys.argv))
@@ -1,68 +0,0 @@
-#!/usr/bin/env python3
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (C) 2012 Wind River Systems, Inc.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-#
-# This is used for dumping the bb_cache.dat, the output format is:
-# recipe_path PN PV PACKAGES
-#
-import os
-import sys
-import warnings
-
-# For importing bb.cache
-sys.path.insert(0, os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), '../lib'))
-from bb.cache import CoreRecipeInfo
-
-import pickle as pickle
-
-def main(argv=None):
-    """
-    Get the mapping for the target recipe.
-    """
-    if len(argv) != 1:
-        print("Error, need one argument!", file=sys.stderr)
-        return 2
-
-    cachefile = argv[0]
-
-    with open(cachefile, "rb") as cachefile:
-        pickled = pickle.Unpickler(cachefile)
-        while cachefile:
-            try:
-                key = pickled.load()
-                val = pickled.load()
-            except Exception:
-                break
-            if isinstance(val, CoreRecipeInfo) and (not val.skipped):
-                pn = val.pn
-                # Filter out the native recipes.
-                if key.startswith('virtual:native:') or pn.endswith("-native"):
-                    continue
-
-                # 1.0 is the default version for a no PV recipe.
-                if "pv" in val.__dict__:
-                    pv = val.pv
-                else:
-                    pv = "1.0"
-
-                print("%s %s %s %s" % (key, pn, pv, ' '.join(val.packages)))
-
-if __name__ == "__main__":
-    sys.exit(main(sys.argv[1:]))
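The removed helper above walks a pickled bb_cache.dat and prints one line per non-native, non-skipped recipe in the "recipe_path PN PV PACKAGES" format documented in its header comment. A hypothetical invocation; the script name and the cache path below are placeholders, not taken from this view:

    ./<this-script> <builddir>/tmp/cache/bb_cache.dat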
@@ -10,11 +10,11 @@ if &compatible || version < 600
     finish
 endif

-" .bb, .bbappend and .bbclass
-au BufNewFile,BufRead *.{bb,bbappend,bbclass} set filetype=bitbake
+" .bb and .bbclass
+au BufNewFile,BufRead *.b{b,bclass} set filetype=bitbake

 " .inc
 au BufNewFile,BufRead *.inc set filetype=bitbake

 " .conf
 au BufNewFile,BufRead *.conf
@@ -1,2 +1 @@
 set sts=4 sw=4 et
-set cms=#%s
@@ -15,15 +15,15 @@ if &compatible || v:version < 600
 endif

 fun! <SID>GetUserName()
-    let l:user_name = system("git config --get user.name")
+    let l:user_name = system("git-config --get user.name")
     if v:shell_error
-        return "Unknown User"
+        return "Unknow User"
     else
         return substitute(l:user_name, "\n", "", "")
 endfun

 fun! <SID>GetUserEmail()
-    let l:user_email = system("git config --get user.email")
+    let l:user_email = system("git-config --get user.email")
     if v:shell_error
         return "unknow@user.org"
     else
@@ -53,6 +53,7 @@ fun! NewBBTemplate()
     put ='LICENSE = \"\"'
     put ='SECTION = \"\"'
     put ='DEPENDS = \"\"'
+    put ='PR = \"r0\"'
     put =''
     put ='SRC_URI = \"\"'
@@ -44,22 +44,22 @@ syn match bbArrayBrackets "[\[\]]" contained

 " BitBake strings
 syn match bbContinue "\\$"
-syn region bbString matchgroup=bbQuote start=+"+ skip=+\\$+ end=+"+ contained contains=bbTodo,bbContinue,bbVarDeref,bbVarPyValue,@Spell
+syn region bbString matchgroup=bbQuote start=+"+ skip=+\\$+ excludenl end=+"+ contained keepend contains=bbTodo,bbContinue,bbVarDeref,bbVarPyValue,@Spell
-syn region bbString matchgroup=bbQuote start=+'+ skip=+\\$+ end=+'+ contained contains=bbTodo,bbContinue,bbVarDeref,bbVarPyValue,@Spell
+syn region bbString matchgroup=bbQuote start=+'+ skip=+\\$+ excludenl end=+'+ contained keepend contains=bbTodo,bbContinue,bbVarDeref,bbVarPyValue,@Spell

 " Vars definition
 syn match bbExport "^export" nextgroup=bbIdentifier skipwhite
 syn keyword bbExportFlag export contained nextgroup=bbIdentifier skipwhite
 syn match bbIdentifier "[a-zA-Z0-9\-_\.\/\+]\+" display contained
 syn match bbVarDeref "${[a-zA-Z0-9\-_\.\/\+]\+}" contained
-syn match bbVarEq "\(:=\|+=\|=+\|\.=\|=\.\|?=\|??=\|=\)" contained nextgroup=bbVarValue
+syn match bbVarEq "\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)" contained nextgroup=bbVarValue
-syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.\/\+]\+\(_[${}a-zA-Z0-9\-_\.\/\+]\+\)\?\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|??=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbVarDeref nextgroup=bbVarEq
+syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.\/\+]\+\(_[${}a-zA-Z0-9\-_\.\/\+]\+\)\?\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbVarDeref nextgroup=bbVarEq
 syn match bbVarValue ".*$" contained contains=bbString,bbVarDeref,bbVarPyValue
-syn region bbVarPyValue start=+${@+ skip=+\\$+ end=+}+ contained contains=@python
+syn region bbVarPyValue start=+${@+ skip=+\\$+ excludenl end=+}+ contained contains=@python

 " Vars metadata flags
 syn match bbVarFlagDef "^\([a-zA-Z0-9\-_\.]\+\)\(\[[a-zA-Z0-9\-_\.]\+\]\)\@=" contains=bbIdentifier nextgroup=bbVarFlagFlag
-syn region bbVarFlagFlag matchgroup=bbArrayBrackets start="\[" end="\]\s*\(=\|+=\|=+\|?=\)\@=" contained contains=bbIdentifier nextgroup=bbVarEq
+syn region bbVarFlagFlag matchgroup=bbArrayBrackets start="\[" end="\]\s*\(=\)\@=" keepend excludenl contained contains=bbIdentifier nextgroup=bbVarEq

 " Includes and requires
 syn keyword bbInclude inherit include require contained
@@ -83,16 +83,13 @@ if exists("b:current_syntax")
     unlet b:current_syntax
 endif
 syn keyword bbShFakeRootFlag fakeroot contained
-syn match bbShFuncDef "^\(fakeroot\s*\)\?\([0-9A-Za-z_${}-]\+\)\(python\)\@<!\(\s*()\s*\)\({\)\@=" contains=bbShFakeRootFlag,bbFunction,bbVarDeref,bbDelimiter nextgroup=bbShFuncRegion skipwhite
+syn match bbShFuncDef "^\(fakeroot\s*\)\?\([0-9A-Za-z_-]\+\)\(python\)\@<!\(\s*()\s*\)\({\)\@=" contains=bbShFakeRootFlag,bbFunction,bbDelimiter nextgroup=bbShFuncRegion skipwhite
-syn region bbShFuncRegion matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" contained contains=@shell
+syn region bbShFuncRegion matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" keepend contained contains=@shell

-" Python value inside shell functions
-syn region shDeref start=+${@+ skip=+\\$+ excludenl end=+}+ contained contains=@python
-
 " BitBake python metadata
 syn keyword bbPyFlag python contained
-syn match bbPyFuncDef "^\(python\s\+\)\([0-9A-Za-z_${}-]\+\)\?\(\s*()\s*\)\({\)\@=" contains=bbPyFlag,bbFunction,bbVarDeref,bbDelimiter nextgroup=bbPyFuncRegion skipwhite
+syn match bbPyFuncDef "^\(python\s\+\)\([0-9A-Za-z_-]\+\)\?\(\s*()\s*\)\({\)\@=" contains=bbPyFlag,bbFunction,bbDelimiter nextgroup=bbPyFuncRegion skipwhite
-syn region bbPyFuncRegion matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" contained contains=@python
+syn region bbPyFuncRegion matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" keepend contained contains=@python

 " BitBake 'def'd python functions
 syn keyword bbPyDef def contained
@@ -1,91 +0,0 @@
-# This is a single Makefile to handle all generated BitBake documents.
-# The Makefile needs to live in the documentation directory and all figures used
-# in any manuals must be .PNG files and live in the individual book's figures
-# directory.
-#
-# The Makefile has these targets:
-#
-#    pdf:      generates a PDF version of a manual.
-#    html:     generates an HTML version of a manual.
-#    tarball:  creates a tarball for the doc files.
-#    validate: validates
-#    clean:    removes files
-#
-# The Makefile generates an HTML version of every document.  The
-# variable DOC indicates the folder name for a given manual.
-#
-# To build a manual, you must invoke 'make' with the DOC argument.
-#
-#     Examples:
-#
-#         make DOC=bitbake-user-manual
-#         make pdf DOC=bitbake-user-manual
-#
-# The first example generates the HTML version of the User Manual.
-# The second example generates the PDF version of the User Manual.
-#
-
-ifeq ($(DOC),bitbake-user-manual)
-XSLTOPTS = --stringparam html.stylesheet bitbake-user-manual-style.css \
-           --stringparam chapter.autolabel 1 \
-           --stringparam section.autolabel 1 \
-           --stringparam section.label.includes.component.label 1 \
-           --xinclude
-ALLPREQ = html tarball
-TARFILES = bitbake-user-manual-style.css bitbake-user-manual.html figures/bitbake-title.png
-MANUALS = $(DOC)/$(DOC).html
-FIGURES = figures
-STYLESHEET = $(DOC)/*.css
-
-endif
-
-##
-# These URI should be rewritten by your distribution's xml catalog to
-# match your localy installed XSL stylesheets.
-XSL_BASE_URI = http://docbook.sourceforge.net/release/xsl/current
-XSL_XHTML_URI = $(XSL_BASE_URI)/xhtml/docbook.xsl
-
-all: $(ALLPREQ)
-
-pdf:
-ifeq ($(DOC),bitbake-user-manual)
-	@echo " "
-	@echo "********** Building."$(DOC)
-	@echo " "
-	cd $(DOC); ../tools/docbook-to-pdf $(DOC).xml ../template; cd ..
-endif
-
-html:
-ifeq ($(DOC),bitbake-user-manual)
-#       See http://www.sagehill.net/docbookxsl/HtmlOutput.html
-	@echo " "
-	@echo "******** Building "$(DOC)
-	@echo " "
-	cd $(DOC); xsltproc $(XSLTOPTS) -o $(DOC).html $(DOC)-customization.xsl $(DOC).xml; cd ..
-endif
-
-tarball: html
-	@echo " "
-	@echo "******** Creating Tarball of document files"
-	@echo " "
-	cd $(DOC); tar -cvzf $(DOC).tgz $(TARFILES); cd ..
-
-validate:
-	cd $(DOC); xmllint --postvalid --xinclude --noout $(DOC).xml; cd ..
-
-publish:
-	@if test -f $(DOC)/$(DOC).html; \
-	then \
-	  echo " "; \
-	  echo "******** Publishing "$(DOC)".html"; \
-	  echo " "; \
-	  scp -r $(MANUALS) $(STYLESHEET) docs.yp:/var/www/www.yoctoproject.org-docs/$(VER)/$(DOC); \
-	  cd $(DOC); scp -r $(FIGURES) docs.yp:/var/www/www.yoctoproject.org-docs/$(VER)/$(DOC); \
-	else \
-	  echo " "; \
-	  echo $(DOC)".html missing. Generate the file first then try again."; \
-	  echo " "; \
-	fi
-
-clean:
-	rm -rf $(MANUALS); rm $(DOC)/$(DOC).tgz;
@@ -1,39 +0,0 @@
-Documentation
-=============
-
-This is the directory that contains the BitBake documentation.
-
-Manual Organization
-===================
-
-Folders exist for individual manuals as follows:
-
-* bitbake-user-manual - The BitBake User Manual
-
-Each folder is self-contained regarding content and figures.
-
-If you want to find HTML versions of the BitBake manuals on the web,
-go to http://www.openembedded.org/wiki/Documentation.
-
-Makefile
-========
-
-The Makefile processes manual directories to create HTML, PDF,
-tarballs, etc.  Details on how the Makefile work are documented
-inside the Makefile.  See that file for more information.
-
-To build a manual, you run the make command and pass it the name
-of the folder containing the manual's contents.
-For example, the following command run from the documentation directory
-creates an HTML and a PDF version of the BitBake User Manual.
-The DOC variable specifies the manual you are making:
-
-     $ make DOC=bitbake-user-manual
-
-template
-========
-Contains various templates, fonts, and some old PNG files.
-
-tools
-=====
-Contains a tool to convert the DocBook files to PDF format.
@ -1,29 +0,0 @@
|
||||||
<?xml version='1.0'?>
|
|
||||||
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns="http://www.w3.org/1999/xhtml" xmlns:fo="http://www.w3.org/1999/XSL/Format" version="1.0">
|
|
||||||
|
|
||||||
<xsl:import href="http://downloads.yoctoproject.org/mirror/docbook-mirror/docbook-xsl-1.76.1/xhtml/docbook.xsl" />
|
|
||||||
|
|
||||||
<!--
|
|
||||||
|
|
||||||
<xsl:import href="../template/1.76.1/docbook-xsl-1.76.1/xhtml/docbook.xsl" />
|
|
||||||
|
|
||||||
<xsl:import href="http://docbook.sourceforge.net/release/xsl/1.76.1/xhtml/docbook.xsl" />
|
|
||||||
|
|
||||||
-->
|
|
||||||
|
|
||||||
<xsl:include href="../template/permalinks.xsl"/>
|
|
||||||
<xsl:include href="../template/section.title.xsl"/>
|
|
||||||
<xsl:include href="../template/component.title.xsl"/>
|
|
||||||
<xsl:include href="../template/division.title.xsl"/>
|
|
||||||
<xsl:include href="../template/formal.object.heading.xsl"/>
|
|
||||||
<xsl:include href="../template/gloss-permalinks.xsl"/>
|
|
||||||
|
|
||||||
<xsl:param name="html.stylesheet" select="'bitbake-user-manual-style.css'" />
|
|
||||||
<xsl:param name="chapter.autolabel" select="1" />
|
|
||||||
<xsl:param name="section.autolabel" select="1" />
|
|
||||||
<xsl:param name="section.label.includes.component.label" select="1" />
|
|
||||||
<xsl:param name="appendix.autolabel">A</xsl:param>
|
|
||||||
|
|
||||||
<!-- <xsl:param name="generate.toc" select="'article nop'"></xsl:param> -->
|
|
||||||
|
|
||||||
</xsl:stylesheet>
|
|
|
@ -1,932 +0,0 @@
|
||||||
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
|
||||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
|
||||||
|
|
||||||
<chapter id="bitbake-user-manual-execution">
|
|
||||||
<title>Execution</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The primary purpose for running BitBake is to produce some kind
|
|
||||||
of output such as a single installable package, a kernel, a software
|
|
||||||
development kit, or even a full, board-specific bootable Linux image,
|
|
||||||
complete with bootloader, kernel, and root filesystem.
|
|
||||||
Of course, you can execute the <filename>bitbake</filename>
|
|
||||||
command with options that cause it to execute single tasks,
|
|
||||||
compile single recipe files, capture or clear data, or simply
|
|
||||||
return information about the execution environment.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
This chapter describes BitBake's execution process from start
|
|
||||||
to finish when you use it to create an image.
|
|
||||||
The execution process is launched using the following command
|
|
||||||
form:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ bitbake <replaceable>target</replaceable>
|
|
||||||
</literallayout>
|
|
||||||
For information on the BitBake command and its options,
|
|
||||||
see
|
|
||||||
"<link linkend='bitbake-user-manual-command'>The BitBake Command</link>"
|
|
||||||
section.
|
|
||||||
<note>
|
|
||||||
<para>
|
|
||||||
Prior to executing BitBake, you should take advantage of available
|
|
||||||
parallel thread execution on your build host by setting the
|
|
||||||
<link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
|
|
||||||
variable in your project's <filename>local.conf</filename>
|
|
||||||
configuration file.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
A common method to determine this value for your build host is to run
|
|
||||||
the following:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ grep processor /proc/cpuinfo
|
|
||||||
</literallayout>
|
|
||||||
This command returns the number of processors, which takes into
|
|
||||||
account hyper-threading.
|
|
||||||
Thus, a quad-core build host with hyper-threading most likely
|
|
||||||
shows eight processors, which is the value you would then assign to
|
|
||||||
<filename>BB_NUMBER_THREADS</filename>.
|
|
||||||
</para>
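<para>
    As an example, on an eight-processor build host you could add the
    following to your <filename>local.conf</filename> file (the value
    shown is illustrative and should match your own host):
    <literallayout class='monospaced'>
     BB_NUMBER_THREADS = "8"
    </literallayout>
</para>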
|
|
||||||
|
|
||||||
<para>
|
|
||||||
A possibly simpler solution is that some Linux distributions
|
|
||||||
(e.g. Debian and Ubuntu) provide the <filename>ncpus</filename> command.
|
|
||||||
</para>
|
|
||||||
</note>
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<section id='parsing-the-base-configuration-metadata'>
|
|
||||||
<title>Parsing the Base Configuration Metadata</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The first thing BitBake does is parse base configuration
|
|
||||||
metadata.
|
|
||||||
Base configuration metadata consists of your project's
|
|
||||||
<filename>bblayers.conf</filename> file to determine what
|
|
||||||
layers BitBake needs to recognize, all necessary
|
|
||||||
<filename>layer.conf</filename> files (one from each layer),
|
|
||||||
and <filename>bitbake.conf</filename>.
|
|
||||||
The data itself is of various types:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para><emphasis>Recipes:</emphasis>
|
|
||||||
Details about particular pieces of software.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>Class Data:</emphasis>
|
|
||||||
An abstraction of common build information
|
|
||||||
(e.g. how to build a Linux kernel).
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>Configuration Data:</emphasis>
|
|
||||||
Machine-specific settings, policy decisions,
|
|
||||||
and so forth.
|
|
||||||
Configuration data acts as the glue to bind everything
|
|
||||||
together.</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The <filename>layer.conf</filename> files are used to
|
|
||||||
construct key variables such as
|
|
||||||
<link linkend='var-BBPATH'><filename>BBPATH</filename></link>
|
|
||||||
and
|
|
||||||
<link linkend='var-BBFILES'><filename>BBFILES</filename></link>.
|
|
||||||
<filename>BBPATH</filename> is used to search for
|
|
||||||
configuration and class files under the
|
|
||||||
<filename>conf</filename> and <filename>classes</filename>
|
|
||||||
directories, respectively.
|
|
||||||
<filename>BBFILES</filename> is used to locate both recipe
|
|
||||||
and recipe append files
|
|
||||||
(<filename>.bb</filename> and <filename>.bbappend</filename>).
|
|
||||||
If there is no <filename>bblayers.conf</filename> file,
|
|
||||||
it is assumed the user has set the <filename>BBPATH</filename>
|
|
||||||
and <filename>BBFILES</filename> directly in the environment.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Next, the <filename>bitbake.conf</filename> file is located
|
|
||||||
using the <filename>BBPATH</filename> variable that was
|
|
||||||
just constructed.
|
|
||||||
The <filename>bitbake.conf</filename> file may also include other
|
|
||||||
configuration files using the
|
|
||||||
<filename>include</filename> or
|
|
||||||
<filename>require</filename> directives.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Prior to parsing configuration files, BitBake looks
|
|
||||||
at certain variables, including:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para>
|
|
||||||
<link linkend='var-BB_ENV_WHITELIST'><filename>BB_ENV_WHITELIST</filename></link>
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
<link linkend='var-BB_ENV_EXTRAWHITE'><filename>BB_ENV_EXTRAWHITE</filename></link>
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
<link linkend='var-BB_PRESERVE_ENV'><filename>BB_PRESERVE_ENV</filename></link>
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
<link linkend='var-BB_ORIGENV'><filename>BB_ORIGENV</filename></link>
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
<link linkend='var-BITBAKE_UI'><filename>BITBAKE_UI</filename></link>
|
|
||||||
</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
The first four variables in this list relate to how BitBake treats shell
|
|
||||||
environment variables during task execution.
|
|
||||||
By default, BitBake cleans the environment variables and provides tight
|
|
||||||
control over the shell execution environment.
|
|
||||||
However, through the use of these first four variables, you can
|
|
||||||
apply your control regarding the
|
|
||||||
environment variables allowed to be used by BitBake in the shell
|
|
||||||
during execution of tasks.
|
|
||||||
See the
|
|
||||||
"<link linkend='passing-information-into-the-build-task-environment'>Passing Information Into the Build Task Environment</link>"
|
|
||||||
section and the information about these variables in the
|
|
||||||
variable glossary for more information on how they work and
|
|
||||||
on how to use them.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The base configuration metadata is global
|
|
||||||
and therefore affects all recipes and tasks that are executed.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
BitBake first searches the current working directory for an
|
|
||||||
optional <filename>conf/bblayers.conf</filename> configuration file.
|
|
||||||
This file is expected to contain a
|
|
||||||
<link linkend='var-BBLAYERS'><filename>BBLAYERS</filename></link>
|
|
||||||
variable that is a space-delimited list of 'layer' directories.
|
|
||||||
Recall that if BitBake cannot find a <filename>bblayers.conf</filename>
|
|
||||||
file, then it is assumed the user has set the <filename>BBPATH</filename>
|
|
||||||
and <filename>BBFILES</filename> variables directly in the environment.
|
|
||||||
</para>
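<para>
    A minimal <filename>conf/bblayers.conf</filename> might therefore
    look like the following sketch (the layer paths are purely
    illustrative):
    <literallayout class='monospaced'>
     BBLAYERS = " \
         /path/to/layers/layer-a \
         /path/to/layers/layer-b \
         "
    </literallayout>
</para>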
|
|
||||||
|
|
||||||
<para>
|
|
||||||
For each directory (layer) in this list, a <filename>conf/layer.conf</filename>
|
|
||||||
file is located and parsed with the
|
|
||||||
<link linkend='var-LAYERDIR'><filename>LAYERDIR</filename></link>
|
|
||||||
variable being set to the directory where the layer was found.
|
|
||||||
The idea is that these files automatically set up
|
|
||||||
<link linkend='var-BBPATH'><filename>BBPATH</filename></link>
|
|
||||||
and other variables correctly for a given build directory.
|
|
||||||
</para>
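<para>
    As a sketch, a typical <filename>conf/layer.conf</filename> file
    extends these variables relative to <filename>LAYERDIR</filename>
    (the collection name "mylayer" and the priority value are
    illustrative):
    <literallayout class='monospaced'>
     BBPATH .= ":${LAYERDIR}"
     BBFILES += "${LAYERDIR}/recipes-*/*/*.bb ${LAYERDIR}/recipes-*/*/*.bbappend"
     BBFILE_COLLECTIONS += "mylayer"
     BBFILE_PATTERN_mylayer = "^${LAYERDIR}/"
     BBFILE_PRIORITY_mylayer = "6"
    </literallayout>
</para>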
|
|
||||||
|
|
||||||
<para>
|
|
||||||
BitBake then expects to find the <filename>conf/bitbake.conf</filename>
|
|
||||||
file somewhere in the user-specified <filename>BBPATH</filename>.
|
|
||||||
That configuration file generally has include directives to pull
|
|
||||||
in any other metadata such as files specific to the architecture,
|
|
||||||
the machine, the local environment, and so forth.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Only variable definitions and include directives are allowed
|
|
||||||
in BitBake <filename>.conf</filename> files.
|
|
||||||
Some variables directly influence BitBake's behavior.
|
|
||||||
These variables might have been set from the environment
|
|
||||||
depending on the environment variables previously
|
|
||||||
mentioned or set in the configuration files.
|
|
||||||
The
|
|
||||||
"<link linkend='ref-variables-glos'>Variables Glossary</link>"
|
|
||||||
chapter presents a full list of variables.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
After parsing configuration files, BitBake uses its rudimentary
|
|
||||||
inheritance mechanism, which is through class files, to inherit
|
|
||||||
some standard classes.
|
|
||||||
BitBake parses a class when the inherit directive responsible
|
|
||||||
for getting that class is encountered.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The <filename>base.bbclass</filename> file is always included.
|
|
||||||
Other classes that are specified in the configuration using the
|
|
||||||
<link linkend='var-INHERIT'><filename>INHERIT</filename></link>
|
|
||||||
variable are also included.
|
|
||||||
BitBake searches for class files in a
|
|
||||||
<filename>classes</filename> subdirectory under
|
|
||||||
the paths in <filename>BBPATH</filename> in the same way as
|
|
||||||
configuration files.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
A good way to get an idea of the configuration files and
|
|
||||||
the class files used in your execution environment is to
|
|
||||||
run the following BitBake command:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ bitbake -e > mybb.log
|
|
||||||
</literallayout>
|
|
||||||
Examining the top of the <filename>mybb.log</filename>
|
|
||||||
shows you the many configuration files and class files
|
|
||||||
used in your execution environment.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<note>
|
|
||||||
<para>
|
|
||||||
You need to be aware of how BitBake parses curly braces.
|
|
||||||
If a recipe uses a closing curly brace within the function and
|
|
||||||
the character has no leading spaces, BitBake produces a parsing
|
|
||||||
error.
|
|
||||||
If you use a pair of curly braces in a shell function, the
|
|
||||||
closing curly brace must not be located at the start of the line
|
|
||||||
without leading spaces.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Here is an example that causes BitBake to produce a parsing
|
|
||||||
error:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
fakeroot create_shar() {
|
|
||||||
cat << "EOF" > ${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.sh
|
|
||||||
usage()
|
|
||||||
{
|
|
||||||
echo "test"
|
|
||||||
###### The following "}" at the start of the line causes a parsing error ######
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
}
|
|
||||||
</literallayout>
|
|
||||||
Writing the recipe this way avoids the error:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
fakeroot create_shar() {
|
|
||||||
cat << "EOF" > ${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.sh
|
|
||||||
usage()
|
|
||||||
{
|
|
||||||
echo "test"
|
|
||||||
######The following "}" with a leading space at the start of the line avoids the error ######
|
|
||||||
}
|
|
||||||
EOF
|
|
||||||
}
|
|
||||||
</literallayout>
|
|
||||||
</para>
|
|
||||||
</note>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='locating-and-parsing-recipes'>
|
|
||||||
<title>Locating and Parsing Recipes</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
During the configuration phase, BitBake will have set
|
|
||||||
<link linkend='var-BBFILES'><filename>BBFILES</filename></link>.
|
|
||||||
BitBake now uses it to construct a list of recipes to parse,
|
|
||||||
along with any append files (<filename>.bbappend</filename>)
|
|
||||||
to apply.
|
|
||||||
<filename>BBFILES</filename> is a space-separated list of
|
|
||||||
available files and supports wildcards.
|
|
||||||
An example would be:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
BBFILES = "/path/to/bbfiles/*.bb /path/to/appends/*.bbappend"
|
|
||||||
</literallayout>
|
|
||||||
BitBake parses each recipe and append file located
|
|
||||||
with <filename>BBFILES</filename> and stores the values of
|
|
||||||
various variables into the datastore.
|
|
||||||
<note>
|
|
||||||
Append files are applied in the order they are encountered in
|
|
||||||
<filename>BBFILES</filename>.
|
|
||||||
</note>
|
|
||||||
For each file, a fresh copy of the base configuration is
|
|
||||||
made, then the recipe is parsed line by line.
|
|
||||||
Any inherit statements cause BitBake to find and
|
|
||||||
then parse class files (<filename>.bbclass</filename>)
|
|
||||||
using
|
|
||||||
<link linkend='var-BBPATH'><filename>BBPATH</filename></link>
|
|
||||||
as the search path.
|
|
||||||
Finally, BitBake parses in order any append files found in
|
|
||||||
<filename>BBFILES</filename>.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
One common convention is to use the recipe filename to define
|
|
||||||
pieces of metadata.
|
|
||||||
For example, in <filename>bitbake.conf</filename> the recipe
|
|
||||||
name and version are used to set the variables
|
|
||||||
<link linkend='var-PN'><filename>PN</filename></link> and
|
|
||||||
<link linkend='var-PV'><filename>PV</filename></link>:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
PN = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[0] or 'defaultpkgname'}"
|
|
||||||
PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0'}"
|
|
||||||
</literallayout>
|
|
||||||
In this example, a recipe called "something_1.2.3.bb" would set
|
|
||||||
<filename>PN</filename> to "something" and
|
|
||||||
<filename>PV</filename> to "1.2.3".
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
By the time parsing is complete for a recipe, BitBake
|
|
||||||
has a list of tasks that the recipe defines and a set of
|
|
||||||
data consisting of keys and values as well as
|
|
||||||
dependency information about the tasks.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
BitBake does not need all of this information.
|
|
||||||
It only needs a small subset of the information to make
|
|
||||||
decisions about the recipe.
|
|
||||||
Consequently, BitBake caches the values in which it is
|
|
||||||
interested and does not store the rest of the information.
|
|
||||||
Experience has shown it is faster to re-parse the metadata than to
|
|
||||||
try and write it out to the disk and then reload it.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Where possible, subsequent BitBake commands reuse this cache of
|
|
||||||
recipe information.
|
|
||||||
The validity of this cache is determined by first computing a
|
|
||||||
checksum of the base configuration data (see
|
|
||||||
<link linkend='var-BB_HASHCONFIG_WHITELIST'><filename>BB_HASHCONFIG_WHITELIST</filename></link>)
|
|
||||||
and then checking if the checksum matches.
|
|
||||||
If that checksum matches what is in the cache and the recipe
|
|
||||||
and class files have not changed, BitBake is able to use
|
|
||||||
the cache.
|
|
||||||
BitBake then reloads the cached information about the recipe
|
|
||||||
instead of reparsing it from scratch.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Recipe file collections exist to allow the user to
|
|
||||||
have multiple repositories of
|
|
||||||
<filename>.bb</filename> files that contain the same
|
|
||||||
exact package.
|
|
||||||
For example, one could easily use them to make one's
|
|
||||||
own local copy of an upstream repository, but with
|
|
||||||
custom modifications that one does not want upstream.
|
|
||||||
Here is an example:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
BBFILES = "/stuff/openembedded/*/*.bb /stuff/openembedded.modified/*/*.bb"
|
|
||||||
BBFILE_COLLECTIONS = "upstream local"
|
|
||||||
BBFILE_PATTERN_upstream = "^/stuff/openembedded/"
|
|
||||||
BBFILE_PATTERN_local = "^/stuff/openembedded.modified/"
|
|
||||||
BBFILE_PRIORITY_upstream = "5"
|
|
||||||
BBFILE_PRIORITY_local = "10"
|
|
||||||
</literallayout>
|
|
||||||
<note>
|
|
||||||
The layers mechanism is now the preferred method of collecting
|
|
||||||
code.
|
|
||||||
While the collections code remains, its main use is to set layer
|
|
||||||
priorities and to deal with overlap (conflicts) between layers.
|
|
||||||
</note>
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='bb-bitbake-providers'>
|
|
||||||
<title>Providers</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Assuming BitBake has been instructed to execute a target
|
|
||||||
and that all the recipe files have been parsed, BitBake
|
|
||||||
starts to figure out how to build the target.
|
|
||||||
BitBake looks through the <filename>PROVIDES</filename> list
|
|
||||||
for each of the recipes.
|
|
||||||
A <filename>PROVIDES</filename> list is the list of names by which
|
|
||||||
the recipe can be known.
|
|
||||||
Each recipe's <filename>PROVIDES</filename> list is created
|
|
||||||
implicitly through the recipe's
|
|
||||||
<link linkend='var-PN'><filename>PN</filename></link> variable
|
|
||||||
and explicitly through the recipe's
|
|
||||||
<link linkend='var-PROVIDES'><filename>PROVIDES</filename></link>
|
|
||||||
variable, which is optional.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
When a recipe uses <filename>PROVIDES</filename>, that recipe's
|
|
||||||
functionality can be found under an alternative name or names other
|
|
||||||
than the implicit <filename>PN</filename> name.
|
|
||||||
As an example, suppose a recipe named <filename>keyboard_1.0.bb</filename>
|
|
||||||
contained the following:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
PROVIDES += "fullkeyboard"
|
|
||||||
</literallayout>
|
|
||||||
The <filename>PROVIDES</filename> list for this recipe becomes
|
|
||||||
"keyboard", which is implicit, and "fullkeyboard", which is explicit.
|
|
||||||
Consequently, the functionality found in
|
|
||||||
<filename>keyboard_1.0.bb</filename> can be found under two
|
|
||||||
different names.
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='bb-bitbake-preferences'>
|
|
||||||
<title>Preferences</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The <filename>PROVIDES</filename> list is only part of the solution
|
|
||||||
for figuring out a target's recipes.
|
|
||||||
Because targets might have multiple providers, BitBake needs
|
|
||||||
to prioritize providers by determining provider preferences.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
A common example in which a target has multiple providers
|
|
||||||
is "virtual/kernel", which is on the
|
|
||||||
<filename>PROVIDES</filename> list for each kernel recipe.
|
|
||||||
Each machine often selects the best kernel provider by using a
|
|
||||||
line similar to the following in the machine configuration file:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
PREFERRED_PROVIDER_virtual/kernel = "linux-yocto"
|
|
||||||
</literallayout>
|
|
||||||
The default
|
|
||||||
<link linkend='var-PREFERRED_PROVIDER'><filename>PREFERRED_PROVIDER</filename></link>
|
|
||||||
is the provider with the same name as the target.
|
|
||||||
BitBake iterates through each target it needs to build and
|
|
||||||
resolves them and their dependencies using this process.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Understanding how providers are chosen is made complicated by the fact
|
|
||||||
that multiple versions might exist for a given provider.
|
|
||||||
BitBake defaults to the highest version of a provider.
|
|
||||||
Version comparisons are made using the same method as Debian.
|
|
||||||
You can use the
|
|
||||||
<link linkend='var-PREFERRED_VERSION'><filename>PREFERRED_VERSION</filename></link>
|
|
||||||
variable to specify a particular version.
|
|
||||||
You can influence the order by using the
|
|
||||||
<link linkend='var-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
|
|
||||||
variable.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
By default, files have a preference of "0".
|
|
||||||
Setting <filename>DEFAULT_PREFERENCE</filename> to "-1" makes the
|
|
||||||
recipe unlikely to be used unless it is explicitly referenced.
|
|
||||||
Setting <filename>DEFAULT_PREFERENCE</filename> to "1" makes it
|
|
||||||
likely the recipe is used.
|
|
||||||
<filename>PREFERRED_VERSION</filename> overrides any
|
|
||||||
<filename>DEFAULT_PREFERENCE</filename> setting.
|
|
||||||
<filename>DEFAULT_PREFERENCE</filename> is often used to mark newer
|
|
||||||
and more experimental recipe versions until they have undergone
|
|
||||||
sufficient testing to be considered stable.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
When there are multiple “versions” of a given recipe,
|
|
||||||
BitBake defaults to selecting the most recent
|
|
||||||
version, unless otherwise specified.
|
|
||||||
If the recipe in question has a
|
|
||||||
<link linkend='var-DEFAULT_PREFERENCE'><filename>DEFAULT_PREFERENCE</filename></link>
|
|
||||||
set lower than the other recipes (default is 0), then
|
|
||||||
it will not be selected.
|
|
||||||
This allows the person or persons maintaining
|
|
||||||
the repository of recipe files to specify
|
|
||||||
their preference for the default selected version.
|
|
||||||
Additionally, the user can specify their preferred version.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
If the first recipe is named <filename>a_1.1.bb</filename>, then the
|
|
||||||
<link linkend='var-PN'><filename>PN</filename></link> variable
|
|
||||||
will be set to “a”, and the
|
|
||||||
<link linkend='var-PV'><filename>PV</filename></link>
|
|
||||||
variable will be set to 1.1.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Thus, if a recipe named <filename>a_1.2.bb</filename> exists, BitBake
|
|
||||||
will choose 1.2 by default.
|
|
||||||
However, if you define the following variable in a
|
|
||||||
<filename>.conf</filename> file that BitBake parses, you
|
|
||||||
can change that preference:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
PREFERRED_VERSION_a = "1.1"
|
|
||||||
</literallayout>
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<note>
|
|
||||||
<para>
|
|
||||||
It is common for a recipe to provide two versions -- a stable,
|
|
||||||
numbered (and preferred) version, and a version that is
|
|
||||||
automatically checked out from a source code repository that
|
|
||||||
is considered more "bleeding edge" but can be selected only
|
|
||||||
explicitly.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
For example, in the OpenEmbedded codebase, there is a standard,
|
|
||||||
versioned recipe file for BusyBox,
|
|
||||||
<filename>busybox_1.22.1.bb</filename>,
|
|
||||||
but there is also a Git-based version,
|
|
||||||
<filename>busybox_git.bb</filename>, which explicitly contains the line
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
DEFAULT_PREFERENCE = "-1"
|
|
||||||
</literallayout>
|
|
||||||
to ensure that the numbered, stable version is always preferred
|
|
||||||
unless the developer selects otherwise.
|
|
||||||
</para>
|
|
||||||
</note>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='bb-bitbake-dependencies'>
|
|
||||||
<title>Dependencies</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Each target BitBake builds consists of multiple tasks such as
|
|
||||||
<filename>fetch</filename>, <filename>unpack</filename>,
|
|
||||||
<filename>patch</filename>, <filename>configure</filename>,
|
|
||||||
and <filename>compile</filename>.
|
|
||||||
For best performance on multi-core systems, BitBake considers each
|
|
||||||
task as an independent
|
|
||||||
entity with its own set of dependencies.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Dependencies are defined through several variables.
|
|
||||||
You can find information about variables BitBake uses in
|
|
||||||
the <link linkend='ref-variables-glos'>Variables Glossary</link>
|
|
||||||
near the end of this manual.
|
|
||||||
At a basic level, it is sufficient to know that BitBake uses the
|
|
||||||
<link linkend='var-DEPENDS'><filename>DEPENDS</filename></link> and
|
|
||||||
<link linkend='var-RDEPENDS'><filename>RDEPENDS</filename></link> variables when
|
|
||||||
calculating dependencies.
|
|
||||||
</para>
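<para>
    For example, a recipe might declare a build-time dependency on
    another recipe and a runtime dependency on a package as follows
    (the names "libfoo" and "bar" are illustrative):
    <literallayout class='monospaced'>
     DEPENDS = "libfoo"
     RDEPENDS_${PN} = "bar"
    </literallayout>
</para>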
|
|
||||||
|
|
||||||
<para>
|
|
||||||
For more information on how BitBake handles dependencies, see the
|
|
||||||
"<link linkend='dependencies'>Dependencies</link>" section.
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='ref-bitbake-tasklist'>
|
|
||||||
<title>The Task List</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Based on the generated list of providers and the dependency information,
|
|
||||||
BitBake can now calculate exactly what tasks it needs to run and in what
|
|
||||||
order it needs to run them.
|
|
||||||
The
|
|
||||||
"<link linkend='executing-tasks'>Executing Tasks</link>" section has more
|
|
||||||
information on how BitBake chooses which task to execute next.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The build now starts with BitBake forking off threads up to the limit set in the
|
|
||||||
<link linkend='var-BB_NUMBER_THREADS'><filename>BB_NUMBER_THREADS</filename></link>
|
|
||||||
variable.
|
|
||||||
BitBake continues to fork threads as long as there are tasks ready to run,
|
|
||||||
those tasks have all their dependencies met, and the thread threshold has not been
|
|
||||||
exceeded.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
It is worth noting that you can greatly speed up the build time by properly setting
|
|
||||||
the <filename>BB_NUMBER_THREADS</filename> variable.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
As each task completes, a timestamp is written to the directory specified by the
|
|
||||||
<link linkend='var-STAMP'><filename>STAMP</filename></link> variable.
|
|
||||||
On subsequent runs, BitBake looks in the build directory within
|
|
||||||
<filename>tmp/stamps</filename> and does not rerun
|
|
||||||
tasks that are already completed unless a timestamp is found to be invalid.
|
|
||||||
Currently, invalid timestamps are only considered on a per
|
|
||||||
recipe file basis.
|
|
||||||
So, for example, if the configure stamp has a timestamp greater than the
|
|
||||||
compile timestamp for a given target, then the compile task would rerun.
|
|
||||||
Running the compile task again, however, has no effect on other providers
|
|
||||||
that depend on that target.
|
|
||||||
</para>
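<para>
    As an illustrative sketch only, a <filename>STAMP</filename>
    setting in a configuration file typically encodes the recipe name
    and version so that stamps from different recipes do not collide:
    <literallayout class='monospaced'>
     STAMP = "${TMPDIR}/stamps/${PN}-${PV}"
    </literallayout>
</para>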
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The exact format of the stamps is partly configurable.
|
|
||||||
In modern versions of BitBake, a hash is appended to the
|
|
||||||
stamp so that if the configuration changes, the stamp becomes
|
|
||||||
invalid and the task is automatically rerun.
|
|
||||||
This hash, or signature used, is governed by the signature policy
|
|
||||||
that is configured (see the
|
|
||||||
"<link linkend='checksums'>Checksums (Signatures)</link>"
|
|
||||||
section for information).
|
|
||||||
It is also possible to append extra metadata to the stamp using
|
|
||||||
the <filename>[stamp-extra-info]</filename> task flag.
|
|
||||||
For example, OpenEmbedded uses this flag to make some tasks machine-specific.
|
|
||||||
</para>
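<para>
    For example, a configuration could append the machine name to the
    stamp of a particular task as follows (the task name is
    illustrative):
    <literallayout class='monospaced'>
     do_package_write[stamp-extra-info] = "${MACHINE}"
    </literallayout>
</para>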
|
|
||||||
|
|
||||||
<note>
|
|
||||||
Some tasks are marked as "nostamp" tasks.
|
|
||||||
No timestamp file is created when these tasks are run.
|
|
||||||
Consequently, "nostamp" tasks are always rerun.
|
|
||||||
</note>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
For more information on tasks, see the
|
|
||||||
"<link linkend='tasks'>Tasks</link>" section.
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='executing-tasks'>
|
|
||||||
<title>Executing Tasks</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Tasks can be either a shell task or a Python task.
|
|
||||||
For shell tasks, BitBake writes a shell script to
|
|
||||||
<filename>${</filename><link linkend='var-T'><filename>T</filename></link><filename>}/run.do_taskname.pid</filename>
|
|
||||||
and then executes the script.
|
|
||||||
The generated shell script contains all the exported variables,
|
|
||||||
and the shell functions with all variables expanded.
|
|
||||||
Output from the shell script goes to the file
|
|
||||||
<filename>${T}/log.do_taskname.pid</filename>.
|
|
||||||
Looking at the expanded shell functions in the run file and
|
|
||||||
the output in the log files is a useful debugging technique.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
For Python tasks, BitBake executes the task internally and logs
|
|
||||||
information to the controlling terminal.
|
|
||||||
Future versions of BitBake will write the functions to files
|
|
||||||
similar to the way shell tasks are handled.
|
|
||||||
Logging will be handled in a way similar to shell tasks as well.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The order in which BitBake runs the tasks is controlled by its
|
|
||||||
task scheduler.
|
|
||||||
It is possible to configure the scheduler and define custom
|
|
||||||
implementations for specific use cases.
|
|
||||||
For more information, see these variables that control the
|
|
||||||
behavior:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para>
|
|
||||||
<link linkend='var-BB_SCHEDULER'><filename>BB_SCHEDULER</filename></link>
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
<link linkend='var-BB_SCHEDULERS'><filename>BB_SCHEDULERS</filename></link>
|
|
||||||
</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
It is possible to have functions run before and after a task's main
|
|
||||||
function.
|
|
||||||
This is done using the <filename>[prefuncs]</filename>
|
|
||||||
and <filename>[postfuncs]</filename> flags of the task
|
|
||||||
that lists the functions to run.
|
|
||||||
</para>
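<para>
    For example, extra functions can be attached around the
    <filename>do_compile</filename> task as follows (the hook function
    names are illustrative):
    <literallayout class='monospaced'>
     do_compile[prefuncs] += "my_pre_compile_hook"
     do_compile[postfuncs] += "my_post_compile_hook"
    </literallayout>
</para>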
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='checksums'>
|
|
||||||
<title>Checksums (Signatures)</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
A checksum is a unique signature of a task's inputs.
|
|
||||||
The signature of a task can be used to determine if a task
|
|
||||||
needs to be run.
|
|
||||||
Because it is a change in a task's inputs that triggers running
|
|
||||||
the task, BitBake needs to detect all the inputs to a given task.
|
|
||||||
For shell tasks, this turns out to be fairly easy because
|
|
||||||
BitBake generates a "run" shell script for each task and
|
|
||||||
it is possible to create a checksum that gives you a good idea of when
|
|
||||||
the task's data changes.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
To complicate the problem, some things should not be included in
|
|
||||||
the checksum.
|
|
||||||
First, there is the actual specific build path of a given task -
|
|
||||||
the working directory.
|
|
||||||
It does not matter if the working directory changes because it should not
|
|
||||||
affect the output for target packages.
|
|
||||||
The simplistic approach for excluding the working directory is to set
|
|
||||||
it to some fixed value and create the checksum for the "run" script.
|
|
||||||
BitBake goes one step better and uses the
|
|
||||||
<link linkend='var-BB_HASHBASE_WHITELIST'><filename>BB_HASHBASE_WHITELIST</filename></link>
|
|
||||||
variable to define a list of variables that should never be included
|
|
||||||
when generating the signatures.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Another problem results from the "run" scripts containing functions that
|
|
||||||
might or might not get called.
|
|
||||||
The incremental build solution contains code that figures out dependencies
|
|
||||||
between shell functions.
|
|
||||||
This code is used to prune the "run" scripts down to the minimum set,
|
|
||||||
thereby alleviating this problem and making the "run" scripts much more
|
|
||||||
readable as a bonus.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
So far we have solutions for shell scripts.
|
|
||||||
What about Python tasks?
|
|
||||||
The same approach applies even though these tasks are more difficult.
|
|
||||||
The process needs to figure out what variables a Python function accesses
|
|
||||||
and what functions it calls.
|
|
||||||
Again, the incremental build solution contains code that first figures out
|
|
||||||
the variable and function dependencies, and then creates a checksum for the data
|
|
||||||
used as the input to the task.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Like the working directory case, situations exist where dependencies
|
|
||||||
should be ignored.
|
|
||||||
For these cases, you can instruct the build process to ignore a dependency
|
|
||||||
by using a line like the following:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
PACKAGE_ARCHS[vardepsexclude] = "MACHINE"
|
|
||||||
</literallayout>
|
|
||||||
This example ensures that the <filename>PACKAGE_ARCHS</filename> variable does not
|
|
||||||
depend on the value of <filename>MACHINE</filename>, even if it does reference it.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Equally, there are cases where we need to add dependencies BitBake
|
|
||||||
is not able to find.
|
|
||||||
You can accomplish this by using a line like the following:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
PACKAGE_ARCHS[vardeps] = "MACHINE"
|
|
||||||
</literallayout>
|
|
||||||
This example explicitly adds the <filename>MACHINE</filename> variable as a
|
|
||||||
dependency for <filename>PACKAGE_ARCHS</filename>.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Consider a case with in-line Python, for example, where BitBake is not
|
|
||||||
able to figure out dependencies.
|
|
||||||
When running in debug mode (i.e. using <filename>-DDD</filename>), BitBake
|
|
||||||
produces output when it discovers something for which it cannot figure out
|
|
||||||
dependencies.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Thus far, this section has limited discussion to the direct inputs into a task.
|
|
||||||
Information based on direct inputs is referred to as the "basehash" in the
|
|
||||||
code.
|
|
||||||
However, there is still the question of a task's indirect inputs - the
|
|
||||||
things that were already built and present in the build directory.
|
|
||||||
The checksum (or signature) for a particular task needs to add the hashes
|
|
||||||
of all the tasks on which the particular task depends.
|
|
||||||
Choosing which dependencies to add is a policy decision.
|
|
||||||
However, the effect is to generate a master checksum that combines the basehash
|
|
||||||
and the hashes of the task's dependencies.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
At the code level, there are a variety of ways both the basehash and the
|
|
||||||
dependent task hashes can be influenced.
|
|
||||||
Within the BitBake configuration file, we can give BitBake some extra information
|
|
||||||
to help it construct the basehash.
|
|
||||||
The following statement effectively results in a list of global variable
|
|
||||||
dependency excludes - variables never included in any checksum.
|
|
||||||
This example uses variables from OpenEmbedded to help illustrate
|
|
||||||
the concept:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
BB_HASHBASE_WHITELIST ?= "TMPDIR FILE PATH PWD BB_TASKHASH BBPATH DL_DIR \
|
|
||||||
SSTATE_DIR THISDIR FILESEXTRAPATHS FILE_DIRNAME HOME LOGNAME SHELL TERM \
|
|
||||||
USER FILESPATH STAGING_DIR_HOST STAGING_DIR_TARGET COREBASE PRSERV_HOST \
|
|
||||||
PRSERV_DUMPDIR PRSERV_DUMPFILE PRSERV_LOCKDOWN PARALLEL_MAKE \
|
|
||||||
CCACHE_DIR EXTERNAL_TOOLCHAIN CCACHE CCACHE_DISABLE LICENSE_PATH SDKPKGSUFFIX"
|
|
||||||
</literallayout>
|
|
||||||
The previous example excludes the work directory, which is part of
|
|
||||||
<filename>TMPDIR</filename>.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The rules for deciding which hashes of dependent tasks to include through
|
|
||||||
dependency chains are more complex and are generally accomplished with a
|
|
||||||
Python function.
|
|
||||||
The code in <filename>meta/lib/oe/sstatesig.py</filename> shows two examples
|
|
||||||
of this and also illustrates how you can insert your own policy into the system
|
|
||||||
if so desired.
|
|
||||||
This file defines the two basic signature generators OpenEmbedded Core
|
|
||||||
uses: "OEBasic" and "OEBasicHash".
|
|
||||||
By default, there is a dummy "noop" signature handler enabled in BitBake.
|
|
||||||
This means that behavior is unchanged from previous versions.
|
|
||||||
<filename>OE-Core</filename> uses the "OEBasicHash" signature handler by default
|
|
||||||
through this setting in the <filename>bitbake.conf</filename> file:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
BB_SIGNATURE_HANDLER ?= "OEBasicHash"
|
|
||||||
</literallayout>
|
|
||||||
The "OEBasicHash" <filename>BB_SIGNATURE_HANDLER</filename> is the same as the
|
|
||||||
"OEBasic" version but adds the task hash to the stamp files.
|
|
||||||
This results in any metadata change that changes the task hash automatically
|
|
||||||
causing the task to be run again.
|
|
||||||
This removes the need to bump
|
|
||||||
<link linkend='var-PR'><filename>PR</filename></link>
|
|
||||||
values, and changes to metadata automatically ripple across the build.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
It is also worth noting that the end result of these signature generators is to
|
|
||||||
make some dependency and hash information available to the build.
|
|
||||||
This information includes:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para><filename>BB_BASEHASH_task-</filename><replaceable>taskname</replaceable>:
|
|
||||||
The base hashes for each task in the recipe.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><filename>BB_BASEHASH_</filename><replaceable>filename</replaceable><filename>:</filename><replaceable>taskname</replaceable>:
|
|
||||||
The base hashes for each dependent task.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><filename>BBHASHDEPS_</filename><replaceable>filename</replaceable><filename>:</filename><replaceable>taskname</replaceable>:
|
|
||||||
The task dependencies for each task.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><filename>BB_TASKHASH</filename>:
|
|
||||||
The hash of the currently running task.
|
|
||||||
</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
It is worth noting that BitBake's "-S" option lets you
|
|
||||||
debug BitBake's processing of signatures.
|
|
||||||
The options passed to -S allow different debugging modes
|
|
||||||
to be used, either using BitBake's own debug functions
|
|
||||||
or possibly those defined in the metadata/signature handler
|
|
||||||
itself.
|
|
||||||
The simplest parameter to pass is "none", which causes a
|
|
||||||
set of signature information to be written out into
|
|
||||||
<filename>STAMPS_DIR</filename>
|
|
||||||
corresponding to the targets specified.
|
|
||||||
The other currently available parameter is "printdiff",
|
|
||||||
which causes BitBake to try to establish the closest
|
|
||||||
signature match it can (e.g. in the sstate cache) and then
|
|
||||||
run <filename>bitbake-diffsigs</filename> over the matches
|
|
||||||
to determine the stamps and delta where these two
|
|
||||||
stamp trees diverge.
|
|
||||||
<note>
|
|
||||||
It is likely that future versions of BitBake will
|
|
||||||
provide other signature handlers triggered through
|
|
||||||
additional "-S" parameters.
|
|
||||||
</note>
|
|
||||||
</para>
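<para>
    For example (the target name is a placeholder):
    <literallayout class='monospaced'>
     $ bitbake -S none <replaceable>target</replaceable>
     $ bitbake -S printdiff <replaceable>target</replaceable>
    </literallayout>
    The first command writes out signature information for the target
    and its dependencies without building anything, while the second
    additionally attempts the comparison described above.
</para>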
|
|
||||||
|
|
||||||
<para>
|
|
||||||
You can find more information on checksum metadata in the
|
|
||||||
"<link linkend='task-checksums-and-setscene'>Task Checksums and Setscene</link>"
|
|
||||||
section.
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='setscene'>
|
|
||||||
<title>Setscene</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The setscene process enables BitBake to handle "pre-built" artifacts.
|
|
||||||
The ability to handle and reuse these artifacts allows BitBake
|
|
||||||
the luxury of not having to build something from scratch every time.
|
|
||||||
Instead, BitBake can use, when possible, existing build artifacts.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
BitBake needs to have reliable data indicating whether or not an
|
|
||||||
artifact is compatible.
|
|
||||||
Signatures, described in the previous section, provide an ideal
|
|
||||||
way of representing whether an artifact is compatible.
|
|
||||||
If a signature is the same, an object can be reused.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
If an object can be reused, the problem then becomes how to
|
|
||||||
replace a given task or set of tasks with the pre-built artifact.
|
|
||||||
BitBake solves the problem with the "setscene" process.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
When BitBake is asked to build a given target, before building anything,
|
|
||||||
it first asks whether cached information is available for any of the
|
|
||||||
targets it's building, or any of the intermediate targets.
|
|
||||||
If cached information is available, BitBake uses this information instead of
|
|
||||||
running the main tasks.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
BitBake first calls the function defined by the
|
|
||||||
<link linkend='var-BB_HASHCHECK_FUNCTION'><filename>BB_HASHCHECK_FUNCTION</filename></link>
|
|
||||||
variable with a list of tasks and corresponding
|
|
||||||
hashes it wants to build.
|
|
||||||
This function is designed to be fast and returns a list
|
|
||||||
of the tasks for which it believes it can obtain artifacts.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Next, for each of the tasks that were returned as possibilities,
|
|
||||||
BitBake executes a setscene version of the task that the possible
|
|
||||||
artifact covers.
|
|
||||||
Setscene versions of a task have the string "_setscene" appended to the
|
|
||||||
task name.
|
|
||||||
So, for example, the task with the name <filename>xxx</filename> has
|
|
||||||
a setscene task named <filename>xxx_setscene</filename>.
|
|
||||||
The setscene version of the task executes and provides the necessary
|
|
||||||
artifacts returning either success or failure.
|
|
||||||
</para>
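<para>
    As a purely illustrative sketch (the function body, the
    <filename>PREBUILT_DIR</filename> variable, and the task name are
    hypothetical; real implementations such as OpenEmbedded's shared
    state code are considerably more involved), a recipe could provide
    a setscene variant of a <filename>do_deploy</filename> task like
    this:
    <literallayout class='monospaced'>
     do_deploy_setscene () {
         # Restore previously built output instead of running do_deploy
         cp -r ${PREBUILT_DIR}/${PN}/deploy-output/* ${WORKDIR}/deploy/
     }
     addtask do_deploy_setscene
    </literallayout>
</para>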
|
|
||||||
|
|
||||||
<para>
|
|
||||||
As previously mentioned, an artifact can cover more than one task.
|
|
||||||
For example, it is pointless to obtain a compiler if you
|
|
||||||
already have the compiled binary.
|
|
||||||
To handle this, BitBake calls the
|
|
||||||
<link linkend='var-BB_SETSCENE_DEPVALID'><filename>BB_SETSCENE_DEPVALID</filename></link>
|
|
||||||
function for each successful setscene task to know whether or not it needs
|
|
||||||
to obtain the dependencies of that task.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Finally, after all the setscene tasks have executed, BitBake calls the
|
|
||||||
function listed in
|
|
||||||
<link linkend='var-BB_SETSCENE_VERIFY_FUNCTION2'><filename>BB_SETSCENE_VERIFY_FUNCTION2</filename></link>
|
|
||||||
with the list of tasks BitBake thinks have been "covered".
|
|
||||||
The metadata can then ensure that this list is correct and can
|
|
||||||
inform BitBake that it wants specific tasks to be run regardless
|
|
||||||
of the setscene result.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
You can find more information on setscene metadata in the
|
|
||||||
"<link linkend='task-checksums-and-setscene'>Task Checksums and Setscene</link>"
|
|
||||||
section.
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
</chapter>
|
|
|
@ -1,829 +0,0 @@
|
||||||
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
|
||||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
|
||||||
|
|
||||||
<chapter>
|
|
||||||
<title>File Download Support</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
BitBake's fetch module is a standalone piece of library code
|
|
||||||
that deals with the intricacies of downloading source code
|
|
||||||
and files from remote systems.
|
|
||||||
Fetching source code is one of the cornerstones of building software.
|
|
||||||
As such, this module forms an important part of BitBake.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The current fetch module is called "fetch2" and refers to the
|
|
||||||
fact that it is the second major version of the API.
|
|
||||||
The original version is obsolete and has been removed from the codebase.
|
|
||||||
Thus, in all cases, "fetch" refers to "fetch2" in this
|
|
||||||
manual.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<section id='the-download-fetch'>
|
|
||||||
<title>The Download (Fetch)</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
BitBake takes several steps when fetching source code or files.
|
|
||||||
The fetcher codebase deals with two distinct processes in order:
|
|
||||||
obtaining the files from somewhere (cached or otherwise)
|
|
||||||
and then unpacking those files into a specific location and
|
|
||||||
perhaps in a specific way.
|
|
||||||
Getting and unpacking the files is often optionally followed
|
|
||||||
by patching.
|
|
||||||
Patching, however, is not covered by this module.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The code to execute the first part of this process, a fetch,
|
|
||||||
looks something like the following:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
src_uri = (d.getVar('SRC_URI') or "").split()
|
|
||||||
fetcher = bb.fetch2.Fetch(src_uri, d)
|
|
||||||
fetcher.download()
|
|
||||||
</literallayout>
|
|
||||||
This code sets up an instance of the fetch class.
|
|
||||||
The instance uses a space-separated list of URLs from the
|
|
||||||
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
|
|
||||||
variable and then calls the <filename>download</filename>
|
|
||||||
method to download the files.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The instantiation of the fetch class is usually followed by:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
rootdir = l.getVar('WORKDIR')
|
|
||||||
fetcher.unpack(rootdir)
|
|
||||||
</literallayout>
|
|
||||||
This code unpacks the downloaded files to the
|
|
||||||
directory specified by <filename>WORKDIR</filename>.
|
|
||||||
<note>
|
|
||||||
For convenience, the naming in these examples matches
|
|
||||||
the variables used by OpenEmbedded.
|
|
||||||
If you want to see the above code in action, examine
|
|
||||||
the OpenEmbedded class file <filename>base.bbclass</filename>.
|
|
||||||
</note>
|
|
||||||
The <filename>SRC_URI</filename> and <filename>WORKDIR</filename>
|
|
||||||
variables are not hardcoded into the fetcher, since those fetcher
|
|
||||||
methods can be (and are) called with different variable names.
|
|
||||||
In OpenEmbedded for example, the shared state (sstate) code uses
|
|
||||||
the fetch module to fetch the sstate files.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
When the <filename>download()</filename> method is called,
|
|
||||||
BitBake tries to resolve the URLs by looking for source files
|
|
||||||
in a specific search order:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para><emphasis>Pre-mirror Sites:</emphasis>
|
|
||||||
BitBake first uses pre-mirrors to try and find source files.
|
|
||||||
These locations are defined using the
|
|
||||||
<link linkend='var-PREMIRRORS'><filename>PREMIRRORS</filename></link>
|
|
||||||
variable.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>Source URI:</emphasis>
|
|
||||||
If pre-mirrors fail, BitBake uses the original URL (e.g. from
|
|
||||||
<filename>SRC_URI</filename>).
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>Mirror Sites:</emphasis>
|
|
||||||
If fetch failures occur, BitBake next uses mirror locations as
|
|
||||||
defined by the
|
|
||||||
<link linkend='var-MIRRORS'><filename>MIRRORS</filename></link>
|
|
||||||
variable.
|
|
||||||
</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
For each URL passed to the fetcher, the fetcher
|
|
||||||
calls the submodule that handles that particular URL type.
|
|
||||||
This behavior can be the source of some confusion when you
|
|
||||||
are providing URLs for the <filename>SRC_URI</filename>
|
|
||||||
variable.
|
|
||||||
Consider the following two URLs:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
http://git.yoctoproject.org/git/poky;protocol=git
|
|
||||||
git://git.yoctoproject.org/git/poky;protocol=http
|
|
||||||
</literallayout>
|
|
||||||
In the former case, the URL is passed to the
|
|
||||||
<filename>wget</filename> fetcher, which does not
|
|
||||||
understand "git".
|
|
||||||
Therefore, the latter case is the correct form since the
|
|
||||||
Git fetcher does know how to use HTTP as a transport.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Here are some examples that show commonly used mirror
|
|
||||||
definitions:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
PREMIRRORS ?= "\
|
|
||||||
bzr://.*/.* http://somemirror.org/sources/ \n \
|
|
||||||
cvs://.*/.* http://somemirror.org/sources/ \n \
|
|
||||||
git://.*/.* http://somemirror.org/sources/ \n \
|
|
||||||
hg://.*/.* http://somemirror.org/sources/ \n \
|
|
||||||
osc://.*/.* http://somemirror.org/sources/ \n \
|
|
||||||
p4://.*/.* http://somemirror.org/sources/ \n \
|
|
||||||
svn://.*/.* http://somemirror.org/sources/ \n"
|
|
||||||
|
|
||||||
MIRRORS =+ "\
|
|
||||||
ftp://.*/.* http://somemirror.org/sources/ \n \
|
|
||||||
http://.*/.* http://somemirror.org/sources/ \n \
|
|
||||||
https://.*/.* http://somemirror.org/sources/ \n"
|
|
||||||
</literallayout>
|
|
||||||
It is useful to note that BitBake supports
|
|
||||||
cross-URLs.
|
|
||||||
It is possible to mirror a Git repository on an HTTP
|
|
||||||
server as a tarball.
|
|
||||||
This is what the <filename>git://</filename> mapping in
|
|
||||||
the previous example does.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Since network accesses are slow, BitBake maintains a
|
|
||||||
cache of files downloaded from the network.
|
|
||||||
Any source files that are not local (i.e.
|
|
||||||
downloaded from the Internet) are placed into the download
|
|
||||||
directory, which is specified by the
|
|
||||||
<link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
|
|
||||||
variable.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
File integrity is of key importance for reproducing builds.
|
|
||||||
For non-local archive downloads, the fetcher code can verify
|
|
||||||
SHA-256 and MD5 checksums to ensure the archives have been
|
|
||||||
downloaded correctly.
|
|
||||||
You can specify these checksums by using the
|
|
||||||
<filename>SRC_URI</filename> variable with the appropriate
|
|
||||||
varflags as follows:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
SRC_URI[md5sum] = "<replaceable>value</replaceable>"
|
|
||||||
SRC_URI[sha256sum] = "<replaceable>value</replaceable>"
|
|
||||||
</literallayout>
|
|
||||||
You can also specify the checksums as parameters on the
|
|
||||||
<filename>SRC_URI</filename> as shown below:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
SRC_URI = "http://example.com/foobar.tar.bz2;md5sum=4a8e0f237e961fd7785d19d07fdb994d"
|
|
||||||
</literallayout>
|
|
||||||
If multiple URIs exist, you can specify the checksums either
|
|
||||||
directly as in the previous example, or you can name the URLs.
|
|
||||||
The following syntax shows how you name the URIs:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
SRC_URI = "http://example.com/foobar.tar.bz2;name=foo"
|
|
||||||
SRC_URI[foo.md5sum] = "4a8e0f237e961fd7785d19d07fdb994d"
|
|
||||||
</literallayout>
|
|
||||||
After a file has been downloaded and has had its checksum checked,
|
|
||||||
a ".done" stamp is placed in <filename>DL_DIR</filename>.
|
|
||||||
BitBake uses this stamp during subsequent builds to avoid
|
|
||||||
downloading or comparing a checksum for the file again.
|
|
||||||
<note>
|
|
||||||
It is assumed that local storage is safe from data corruption.
|
|
||||||
If this were not the case, there would be bigger issues to worry about.
|
|
||||||
</note>
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
If
|
|
||||||
<link linkend='var-BB_STRICT_CHECKSUM'><filename>BB_STRICT_CHECKSUM</filename></link>
|
|
||||||
is set, any download without a checksum triggers an
|
|
||||||
error message.
|
|
||||||
The
|
|
||||||
<link linkend='var-BB_NO_NETWORK'><filename>BB_NO_NETWORK</filename></link>
|
|
||||||
variable can be used to make any attempted network access a fatal
|
|
||||||
error, which is useful for checking that mirrors are complete
|
|
||||||
as well as other things.
|
|
||||||
</para>
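<para>
    For example, a configuration that insists on checksums and forbids
    any network access could contain the following (a sketch for a
    local configuration file):
    <literallayout class='monospaced'>
     BB_STRICT_CHECKSUM = "1"
     BB_NO_NETWORK = "1"
    </literallayout>
</para>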
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='bb-the-unpack'>
|
|
||||||
<title>The Unpack</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The unpack process usually immediately follows the download.
|
|
||||||
For all URLs except Git URLs, BitBake uses the common
|
|
||||||
<filename>unpack</filename> method.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
A number of parameters exist that you can specify within the
|
|
||||||
URL to govern the behavior of the unpack stage:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para><emphasis>unpack:</emphasis>
|
|
||||||
Controls whether the URL components are unpacked.
|
|
||||||
If set to "1", which is the default, the components
|
|
||||||
are unpacked.
|
|
||||||
If set to "0", the unpack stage leaves the file alone.
|
|
||||||
This parameter is useful when you want an archive to be
|
|
||||||
copied in and not be unpacked.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>dos:</emphasis>
|
|
||||||
Applies to <filename>.zip</filename> and
|
|
||||||
<filename>.jar</filename> files and specifies whether to
|
|
||||||
use DOS line ending conversion on text files.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>basepath:</emphasis>
|
|
||||||
Instructs the unpack stage to strip the specified
|
|
||||||
directories from the source path when unpacking.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>subdir:</emphasis>
|
|
||||||
Unpacks the specific URL to the specified subdirectory
|
|
||||||
within the root directory.
|
|
||||||
</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
The unpack call automatically decompresses and extracts files
|
|
||||||
with ".Z", ".z", ".gz", ".xz", ".zip", ".jar", ".ipk", ".rpm".
|
|
||||||
".srpm", ".deb" and ".bz2" extensions as well as various combinations
|
|
||||||
of tarball extensions.
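For example, the first of the following hypothetical URLs copies the
archive into the work directory without unpacking it, while the
second unpacks its contents into a chosen subdirectory:
<literallayout class='monospaced'>
SRC_URI = "http://example.com/download/archive.tar.gz;unpack=0"
SRC_URI = "http://example.com/download/sources.tar.gz;subdir=mysources"
</literallayout>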
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
As mentioned, the Git fetcher has its own unpack method that
|
|
||||||
is optimized to work with Git trees.
|
|
||||||
Basically, this method works by cloning the tree into the final
|
|
||||||
directory.
|
|
||||||
The process is completed using references so that there is
|
|
||||||
only one central copy of the Git metadata needed.
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='bb-fetchers'>
|
|
||||||
<title>Fetchers</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
As mentioned earlier, the URL prefix determines which
|
|
||||||
fetcher submodule BitBake uses.
|
|
||||||
Each submodule can support different URL parameters,
|
|
||||||
which are described in the following sections.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<section id='local-file-fetcher'>
|
|
||||||
<title>Local file fetcher (<filename>file://</filename>)</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
This submodule handles URLs that begin with
|
|
||||||
<filename>file://</filename>.
|
|
||||||
The filename you specify within the URL can be
|
|
||||||
either an absolute or relative path to a file.
|
|
||||||
If the filename is relative, the contents of the
|
|
||||||
<link linkend='var-FILESPATH'><filename>FILESPATH</filename></link>
|
|
||||||
variable is used in the same way
|
|
||||||
<filename>PATH</filename> is used to find executables.
|
|
||||||
If the file cannot be found, it is assumed that it is available in
|
|
||||||
<link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
|
|
||||||
by the time the <filename>download()</filename> method is called.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
If you specify a directory, the entire directory is
|
|
||||||
unpacked.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Here are a couple of example URLs, the first relative and
|
|
||||||
the second absolute:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
SRC_URI = "file://relativefile.patch"
|
|
||||||
SRC_URI = "file:///Users/ich/very_important_software"
|
|
||||||
</literallayout>
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='http-ftp-fetcher'>
|
|
||||||
<title>HTTP/FTP wget fetcher (<filename>http://</filename>, <filename>ftp://</filename>, <filename>https://</filename>)</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
This fetcher obtains files from web and FTP servers.
|
|
||||||
Internally, the fetcher uses the wget utility.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The executable and parameters used are specified by the
|
|
||||||
<filename>FETCHCMD_wget</filename> variable, which defaults
|
|
||||||
to sensible values.
|
|
||||||
The fetcher supports a parameter "downloadfilename" that
|
|
||||||
allows the name of the downloaded file to be specified.
|
|
||||||
Specifying the name of the downloaded file is useful
|
|
||||||
for avoiding collisions in
|
|
||||||
<link linkend='var-DL_DIR'><filename>DL_DIR</filename></link>
|
|
||||||
when dealing with multiple files that have the same name.
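For example, the following hypothetical URL saves a generically
named download under a more specific name:
<literallayout class='monospaced'>
SRC_URI = "http://example.com/releases/download.tar.gz;downloadfilename=myproject-1.0.tar.gz"
</literallayout>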
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Some example URLs are as follows:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
SRC_URI = "http://oe.handhelds.org/not_there.aac"
|
|
||||||
SRC_URI = "ftp://oe.handhelds.org/not_there_as_well.aac"
|
|
||||||
SRC_URI = "ftp://you@oe.handhelds.org/home/you/secret.plan"
|
|
||||||
</literallayout>
|
|
||||||
</para>
|
|
||||||
<note>
|
|
||||||
Because URL parameters are delimited by semi-colons, this can
|
|
||||||
introduce ambiguity when parsing URLs that also contain semi-colons,
|
|
||||||
for example:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git;a=snapshot;h=a5dd47"
|
|
||||||
</literallayout>
|
|
||||||
Such URLs should be modified by replacing semi-colons with '&' characters:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git&a=snapshot&h=a5dd47"
|
|
||||||
</literallayout>
|
|
||||||
In most cases this should work. Treating semi-colons and '&' in queries
|
|
||||||
identically is recommended by the World Wide Web Consortium (W3C).
|
|
||||||
Note that due to the nature of the URL, you may have to specify the name
|
|
||||||
of the downloaded file as well:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
SRC_URI = "http://abc123.org/git/?p=gcc/gcc.git&a=snapshot&h=a5dd47;downloadfilename=myfile.bz2"
|
|
||||||
</literallayout>
|
|
||||||
</note>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='cvs-fetcher'>
|
|
||||||
<title>CVS fetcher (<filename>cvs://</filename>)</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
This submodule handles checking out files from the
|
|
||||||
CVS version control system.
|
|
||||||
You can configure it using a number of different variables:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para><emphasis><filename>FETCHCMD_cvs</filename>:</emphasis>
|
|
||||||
The name of the executable to use when running
|
|
||||||
the <filename>cvs</filename> command.
|
|
||||||
This name is usually "cvs".
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis><filename>SRCDATE</filename>:</emphasis>
|
|
||||||
The date to use when fetching the CVS source code.
|
|
||||||
A special value of "now" causes the checkout to
|
|
||||||
be updated on every build.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis><link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>:</emphasis>
|
|
||||||
Specifies where a temporary checkout is saved.
|
|
||||||
The location is often <filename>DL_DIR/cvs</filename>.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis><filename>CVS_PROXY_HOST</filename>:</emphasis>
|
|
||||||
The name to use as a "proxy=" parameter to the
|
|
||||||
<filename>cvs</filename> command.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis><filename>CVS_PROXY_PORT</filename>:</emphasis>
|
|
||||||
The port number to use as a "proxyport=" parameter to
|
|
||||||
the <filename>cvs</filename> command.
|
|
||||||
</para></listitem>
|
|
||||||
</itemizedlist>
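For example, a configuration that routes CVS access through a proxy
might set the proxy variables as follows (hypothetical host and port):
<literallayout class='monospaced'>
CVS_PROXY_HOST = "proxy.example.com"
CVS_PROXY_PORT = "3128"
</literallayout>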
|
|
||||||
As well as the standard username and password URL syntax,
|
|
||||||
you can also configure the fetcher with various URL parameters:
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The supported parameters are as follows:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para><emphasis>"method":</emphasis>
|
|
||||||
The protocol over which to communicate with the CVS
|
|
||||||
server.
|
|
||||||
By default, this protocol is "pserver".
|
|
||||||
If "method" is set to "ext", BitBake examines the
|
|
||||||
"rsh" parameter and sets <filename>CVS_RSH</filename>.
|
|
||||||
You can use "dir" for local directories.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"module":</emphasis>
|
|
||||||
Specifies the module to check out.
|
|
||||||
You must supply this parameter.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"tag":</emphasis>
|
|
||||||
Describes which CVS TAG should be used for
|
|
||||||
the checkout.
|
|
||||||
By default, the TAG is empty.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"date":</emphasis>
|
|
||||||
Specifies a date.
|
|
||||||
If no "date" is specified, the
|
|
||||||
<link linkend='var-SRCDATE'><filename>SRCDATE</filename></link>
|
|
||||||
of the configuration is used to checkout a specific date.
|
|
||||||
The special value of "now" causes the checkout to be
|
|
||||||
updated on every build.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"localdir":</emphasis>
|
|
||||||
Used to rename the module.
|
|
||||||
Effectively, you are renaming the output directory
|
|
||||||
to which the module is unpacked.
|
|
||||||
You are forcing the module into a special
|
|
||||||
directory relative to
|
|
||||||
<link linkend='var-CVSDIR'><filename>CVSDIR</filename></link>.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"rsh"</emphasis>
|
|
||||||
Used in conjunction with the "method" parameter.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"scmdata":</emphasis>
|
|
||||||
Causes the CVS metadata to be maintained in the tarball
|
|
||||||
the fetcher creates when set to "keep".
|
|
||||||
The tarball is expanded into the work directory.
|
|
||||||
By default, the CVS metadata is removed.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"fullpath":</emphasis>
|
|
||||||
Controls whether the resulting checkout is at the
|
|
||||||
module level, which is the default, or is at deeper
|
|
||||||
paths.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"norecurse":</emphasis>
|
|
||||||
Causes the fetcher to check out only the specified
directory and not recurse into any subdirectories.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"port":</emphasis>
|
|
||||||
The port to which the CVS server connects.
|
|
||||||
</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
Some example URLs are as follows:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
SRC_URI = "cvs://CVSROOT;module=mymodule;tag=some-version;method=ext"
|
|
||||||
SRC_URI = "cvs://CVSROOT;module=mymodule;date=20060126;localdir=usethat"
|
|
||||||
</literallayout>
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='svn-fetcher'>
|
|
||||||
<title>Subversion (SVN) Fetcher (<filename>svn://</filename>)</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
This fetcher submodule fetches code from the
|
|
||||||
Subversion source control system.
|
|
||||||
The executable used is specified by
|
|
||||||
<filename>FETCHCMD_svn</filename>, which defaults
|
|
||||||
to "svn".
|
|
||||||
The fetcher's temporary working directory is set by
|
|
||||||
<link linkend='var-SVNDIR'><filename>SVNDIR</filename></link>,
|
|
||||||
which is usually <filename>DL_DIR/svn</filename>.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The supported parameters are as follows:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para><emphasis>"module":</emphasis>
|
|
||||||
The name of the svn module to checkout.
|
|
||||||
You must provide this parameter.
|
|
||||||
You can think of this parameter as the top-level
|
|
||||||
directory of the repository data you want.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"path_spec":</emphasis>
|
|
||||||
A specific directory in which to checkout the
|
|
||||||
specified svn module.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"protocol":</emphasis>
|
|
||||||
The protocol to use, which defaults to "svn".
|
|
||||||
If "protocol" is set to "svn+ssh", the "ssh"
|
|
||||||
parameter is also used.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"rev":</emphasis>
|
|
||||||
The revision of the source code to checkout.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"scmdata":</emphasis>
|
|
||||||
Causes the ".svn" directories to be available during
|
|
||||||
compile-time when set to "keep".
|
|
||||||
By default, these directories are removed.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"ssh":</emphasis>
|
|
||||||
An optional parameter used when "protocol" is set
|
|
||||||
to "svn+ssh".
|
|
||||||
You can use this parameter to specify the ssh
|
|
||||||
program used by svn.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"transportuser":</emphasis>
|
|
||||||
When required, sets the username for the transport.
|
|
||||||
By default, this parameter is empty.
|
|
||||||
The transport username is different from the username
|
|
||||||
used in the main URL, which is passed to the subversion
|
|
||||||
command.
|
|
||||||
</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
Following are three examples using svn:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
SRC_URI = "svn://myrepos/proj1;module=vip;protocol=http;rev=667"
|
|
||||||
SRC_URI = "svn://myrepos/proj1;module=opie;protocol=svn+ssh"
|
|
||||||
SRC_URI = "svn://myrepos/proj1;module=trunk;protocol=http;path_spec=${MY_DIR}/proj1"
|
|
||||||
</literallayout>
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='git-fetcher'>
|
|
||||||
<title>Git Fetcher (<filename>git://</filename>)</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
This fetcher submodule fetches code from the Git
|
|
||||||
source control system.
|
|
||||||
The fetcher works by creating a bare clone of the
|
|
||||||
remote into
|
|
||||||
<link linkend='var-GITDIR'><filename>GITDIR</filename></link>,
|
|
||||||
which is usually <filename>DL_DIR/git2</filename>.
|
|
||||||
This bare clone is then cloned into the work directory during the
|
|
||||||
unpack stage when a specific tree is checked out.
|
|
||||||
This is done using alternates and by reference to
|
|
||||||
minimize the amount of duplicate data on the disk and
|
|
||||||
make the unpack process fast.
|
|
||||||
The executable used can be set with
|
|
||||||
<filename>FETCHCMD_git</filename>.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
This fetcher supports the following parameters:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para><emphasis>"protocol":</emphasis>
|
|
||||||
The protocol used to fetch the files.
|
|
||||||
The default is "git" when a hostname is set.
|
|
||||||
If a hostname is not set, the Git protocol is "file".
|
|
||||||
You can also use "http", "https", "ssh" and "rsync".
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"nocheckout":</emphasis>
|
|
||||||
Tells the fetcher to not checkout source code when
|
|
||||||
unpacking when set to "1".
|
|
||||||
Set this option for the URL where there is a custom
|
|
||||||
routine to checkout code.
|
|
||||||
The default is "0".
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"rebaseable":</emphasis>
|
|
||||||
Indicates that the upstream Git repository can be rebased.
|
|
||||||
You should set this parameter to "1" if
|
|
||||||
revisions can become detached from branches.
|
|
||||||
In this case, a source mirror tarball is created per
revision, which reduces efficiency.
|
|
||||||
Rebasing the upstream Git repository could cause the
|
|
||||||
current revision to disappear from the upstream repository.
|
|
||||||
This option reminds the fetcher to preserve the local cache
|
|
||||||
carefully for future use.
|
|
||||||
The default value for this parameter is "0".
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"nobranch":</emphasis>
|
|
||||||
Tells the fetcher to not check the SHA validation
|
|
||||||
for the branch when set to "1".
|
|
||||||
The default is "0".
|
|
||||||
Set this option for the recipe that refers to
|
|
||||||
the commit that is valid for a tag instead of
|
|
||||||
the branch.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"bareclone":</emphasis>
|
|
||||||
Tells the fetcher to clone a bare clone into the
|
|
||||||
destination directory without checking out a working tree.
|
|
||||||
Only the raw Git metadata is provided.
|
|
||||||
This parameter implies the "nocheckout" parameter as well.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"branch":</emphasis>
|
|
||||||
The branch(es) of the Git tree to clone.
|
|
||||||
If unset, this is assumed to be "master".
|
|
||||||
The number of branch parameters must match the number of
|
|
||||||
name parameters.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"rev":</emphasis>
|
|
||||||
The revision to use for the checkout.
|
|
||||||
The default is "master".
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"tag":</emphasis>
|
|
||||||
Specifies a tag to use for the checkout.
|
|
||||||
To correctly resolve tags, BitBake must access the
|
|
||||||
network.
|
|
||||||
For that reason, tags are often not used.
|
|
||||||
As far as Git is concerned, the "tag" parameter behaves
|
|
||||||
effectively the same as the "rev" parameter.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"subpath":</emphasis>
|
|
||||||
Limits the checkout to a specific subpath of the tree.
|
|
||||||
By default, the whole tree is checked out.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"destsuffix":</emphasis>
|
|
||||||
The name of the path in which to place the checkout.
|
|
||||||
By default, the path is <filename>git/</filename>.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>"usehead":</emphasis>
|
|
||||||
Enables local <filename>git://</filename> URLs to use the
|
|
||||||
current branch HEAD as the revision for use with
|
|
||||||
<filename>AUTOREV</filename>.
|
|
||||||
The "usehead" parameter implies no branch and only works
|
|
||||||
when the transfer protocol is
|
|
||||||
<filename>file://</filename>.
|
|
||||||
</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
Here are some example URLs:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
SRC_URI = "git://git.oe.handhelds.org/git/vip.git;tag=version-1"
|
|
||||||
SRC_URI = "git://git.oe.handhelds.org/git/vip.git;protocol=http"
|
|
||||||
</literallayout>
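A checkout of a specific revision on a non-default branch might look
like the following (hypothetical repository and revision):
<literallayout class='monospaced'>
SRC_URI = "git://git.example.org/project.git;protocol=https;branch=develop;rev=b1f2a3c4d5e6f7a8b9c0d1e2f3a4b5c6d7e8f9a0"
</literallayout>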
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='gitsm-fetcher'>
|
|
||||||
<title>Git Submodule Fetcher (<filename>gitsm://</filename>)</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
This fetcher submodule inherits from the
|
|
||||||
<link linkend='git-fetcher'>Git fetcher</link> and extends
|
|
||||||
that fetcher's behavior by fetching a repository's submodules.
|
|
||||||
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>
|
|
||||||
is passed to the Git fetcher as described in the
|
|
||||||
"<link linkend='git-fetcher'>Git Fetcher (<filename>git://</filename>)</link>"
|
|
||||||
section.
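Because the parameters are shared with the Git fetcher, a submodule
checkout might look like the following (hypothetical repository):
<literallayout class='monospaced'>
SRC_URI = "gitsm://git.example.org/project.git;protocol=https;branch=master"
</literallayout>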
|
|
||||||
<note>
|
|
||||||
<title>Notes and Warnings</title>
|
|
||||||
<para>
|
|
||||||
You must clean a recipe when switching between
|
|
||||||
'<filename>git://</filename>' and
|
|
||||||
'<filename>gitsm://</filename>' URLs.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The Git Submodules fetcher is not a complete fetcher
|
|
||||||
implementation.
|
|
||||||
The fetcher has known issues where it does not use the
|
|
||||||
normal source mirroring infrastructure properly.
|
|
||||||
</para>
|
|
||||||
</note>
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='clearcase-fetcher'>
|
|
||||||
<title>ClearCase Fetcher (<filename>ccrc://</filename>)</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
This fetcher submodule fetches code from a
|
|
||||||
<ulink url='http://en.wikipedia.org/wiki/Rational_ClearCase'>ClearCase</ulink>
|
|
||||||
repository.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
To use this fetcher, make sure your recipe has proper
|
|
||||||
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>,
|
|
||||||
<link linkend='var-SRCREV'><filename>SRCREV</filename></link>, and
|
|
||||||
<link linkend='var-PV'><filename>PV</filename></link> settings.
|
|
||||||
Here is an example:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
|
|
||||||
SRCREV = "EXAMPLE_CLEARCASE_TAG"
|
|
||||||
PV = "${@d.getVar("SRCREV", False).replace("/", "+")}"
|
|
||||||
</literallayout>
|
|
||||||
The fetcher uses the <filename>rcleartool</filename> or
|
|
||||||
<filename>cleartool</filename> remote client, depending on
|
|
||||||
which one is available.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Following are options for the <filename>SRC_URI</filename>
|
|
||||||
statement:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para><emphasis><filename>vob</filename></emphasis>:
|
|
||||||
The name, which must include the
|
|
||||||
prepending "/" character, of the ClearCase VOB.
|
|
||||||
This option is required.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis><filename>module</filename></emphasis>:
|
|
||||||
The module, which must include the
|
|
||||||
prepending "/" character, in the selected VOB.
|
|
||||||
<note>
|
|
||||||
The <filename>module</filename> and <filename>vob</filename>
|
|
||||||
options are combined to create the <filename>load</filename> rule in
|
|
||||||
the view config spec.
|
|
||||||
As an example, consider the <filename>vob</filename> and
|
|
||||||
<filename>module</filename> values from the
|
|
||||||
<filename>SRC_URI</filename> statement at the start of this section.
|
|
||||||
Combining those values results in the following:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
load /example_vob/example_module
|
|
||||||
</literallayout>
|
|
||||||
</note>
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis><filename>proto</filename></emphasis>:
|
|
||||||
The protocol, which can be either <filename>http</filename> or
|
|
||||||
<filename>https</filename>.
|
|
||||||
</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
By default, the fetcher creates a configuration specification.
|
|
||||||
If you want this specification written to an area other than the default,
|
|
||||||
use the <filename>CCASE_CUSTOM_CONFIG_SPEC</filename> variable
|
|
||||||
in your recipe to define where the specification is written.
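For example, a recipe might direct the specification to a location
under the work directory (a sketch; the exact path is up to you):
<literallayout class='monospaced'>
CCASE_CUSTOM_CONFIG_SPEC = "${WORKDIR}/my_config_spec"
</literallayout>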
|
|
||||||
<note>
|
|
||||||
The <filename>SRCREV</filename> variable loses its functionality if you
|
|
||||||
specify this variable.
|
|
||||||
However, <filename>SRCREV</filename> is still used to label the
|
|
||||||
archive after a fetch even though it does not define what is
|
|
||||||
fetched.
|
|
||||||
</note>
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Here are a couple of other behaviors worth mentioning:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para>
|
|
||||||
When using <filename>cleartool</filename>, the login of
|
|
||||||
<filename>cleartool</filename> is handled by the system.
|
|
||||||
The login requires no special steps.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
In order to use <filename>rcleartool</filename> with authenticated
|
|
||||||
users, an "rcleartool login" is necessary before using the fetcher.
|
|
||||||
</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='perforce-fetcher'>
|
|
||||||
<title>Perforce Fetcher (<filename>p4://</filename>)</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
This fetcher submodule fetches code from the
|
|
||||||
<ulink url='https://www.perforce.com/'>Perforce</ulink>
|
|
||||||
source control system.
|
|
||||||
The executable used is specified by
|
|
||||||
<filename>FETCHCMD_p4</filename>, which defaults
|
|
||||||
to "p4".
|
|
||||||
The fetcher's temporary working directory is set by
|
|
||||||
<link linkend='var-P4DIR'><filename>P4DIR</filename></link>,
|
|
||||||
which defaults to "DL_DIR/p4".
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
To use this fetcher, make sure your recipe has proper
|
|
||||||
<link linkend='var-SRC_URI'><filename>SRC_URI</filename></link>,
|
|
||||||
<link linkend='var-SRCREV'><filename>SRCREV</filename></link>, and
|
|
||||||
<link linkend='var-PV'><filename>PV</filename></link> values.
|
|
||||||
The p4 executable is able to use the config file defined by your
|
|
||||||
system's <filename>P4CONFIG</filename> environment variable in
|
|
||||||
order to define the Perforce server URL and port, username, and
|
|
||||||
password if you do not wish to keep those values in a recipe
|
|
||||||
itself.
|
|
||||||
If you choose not to use <filename>P4CONFIG</filename>,
|
|
||||||
or to explicitly set variables that <filename>P4CONFIG</filename>
|
|
||||||
can contain, you can specify the <filename>P4PORT</filename> value,
|
|
||||||
which is the server's URL and port number, and you can
|
|
||||||
specify a username and password directly in your recipe within
|
|
||||||
<filename>SRC_URI</filename>.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Here is an example that relies on <filename>P4CONFIG</filename>
|
|
||||||
to specify the server URL and port, username, and password, and
|
|
||||||
fetches the Head Revision:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
SRC_URI = "p4://example-depot/main/source/..."
|
|
||||||
SRCREV = "${AUTOREV}"
|
|
||||||
PV = "p4-${SRCPV}"
|
|
||||||
S = "${WORKDIR}/p4"
|
|
||||||
</literallayout>
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Here is an example that specifies the server URL and port,
|
|
||||||
username, and password, and fetches a Revision based on a Label:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
P4PORT = "tcp:p4server.example.net:1666"
|
|
||||||
SRC_URI = "p4://user:passwd@example-depot/main/source/..."
|
|
||||||
SRCREV = "release-1.0"
|
|
||||||
PV = "p4-${SRCPV}"
|
|
||||||
S = "${WORKDIR}/p4"
|
|
||||||
</literallayout>
|
|
||||||
<note>
|
|
||||||
You should always set <filename>S</filename>
|
|
||||||
to <filename>"${WORKDIR}/p4"</filename> in your recipe.
|
|
||||||
</note>
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='other-fetchers'>
|
|
||||||
<title>Other Fetchers</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Fetch submodules also exist for the following:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para>
|
|
||||||
Bazaar (<filename>bzr://</filename>)
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
Trees using Git Annex (<filename>gitannex://</filename>)
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
Secure FTP (<filename>sftp://</filename>)
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
Secure Shell (<filename>ssh://</filename>)
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
Repo (<filename>repo://</filename>)
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
OSC (<filename>osc://</filename>)
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
Mercurial (<filename>hg://</filename>)
|
|
||||||
</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
No documentation currently exists for these lesser used
|
|
||||||
fetcher submodules.
|
|
||||||
However, you might find the code helpful and readable.
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='auto-revisions'>
|
|
||||||
<title>Auto Revisions</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
We need to document <filename>AUTOREV</filename> and
|
|
||||||
<filename>SRCREV_FORMAT</filename> here.
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
</chapter>
|
|
||||||
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
|
||||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
|
||||||
|
|
||||||
<appendix id='hello-world-example'>
|
|
||||||
<title>Hello World Example</title>
|
|
||||||
|
|
||||||
<section id='bitbake-hello-world'>
|
|
||||||
<title>BitBake Hello World</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The simplest example commonly used to demonstrate any new
|
|
||||||
programming language or tool is the
|
|
||||||
"<ulink url="http://en.wikipedia.org/wiki/Hello_world_program">Hello World</ulink>"
|
|
||||||
example.
|
|
||||||
This appendix demonstrates, in tutorial form, Hello
|
|
||||||
World within the context of BitBake.
|
|
||||||
The tutorial describes how to create a new project
|
|
||||||
and the applicable metadata files necessary to allow
|
|
||||||
BitBake to build it.
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='example-obtaining-bitbake'>
|
|
||||||
<title>Obtaining BitBake</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
See the
|
|
||||||
"<link linkend='obtaining-bitbake'>Obtaining BitBake</link>"
|
|
||||||
section for information on how to obtain BitBake.
|
|
||||||
Once you have the source code on your machine, the BitBake directory
|
|
||||||
appears as follows:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ ls -al
|
|
||||||
total 100
|
|
||||||
drwxrwxr-x. 9 wmat wmat 4096 Jan 31 13:44 .
|
|
||||||
drwxrwxr-x. 3 wmat wmat 4096 Feb 4 10:45 ..
|
|
||||||
-rw-rw-r--. 1 wmat wmat 365 Nov 26 04:55 AUTHORS
|
|
||||||
drwxrwxr-x. 2 wmat wmat 4096 Nov 26 04:55 bin
|
|
||||||
drwxrwxr-x. 4 wmat wmat 4096 Jan 31 13:44 build
|
|
||||||
-rw-rw-r--. 1 wmat wmat 16501 Nov 26 04:55 ChangeLog
|
|
||||||
drwxrwxr-x. 2 wmat wmat 4096 Nov 26 04:55 classes
|
|
||||||
drwxrwxr-x. 2 wmat wmat 4096 Nov 26 04:55 conf
|
|
||||||
drwxrwxr-x. 3 wmat wmat 4096 Nov 26 04:55 contrib
|
|
||||||
-rw-rw-r--. 1 wmat wmat 17987 Nov 26 04:55 COPYING
|
|
||||||
drwxrwxr-x. 3 wmat wmat 4096 Nov 26 04:55 doc
|
|
||||||
-rw-rw-r--. 1 wmat wmat 69 Nov 26 04:55 .gitignore
|
|
||||||
-rw-rw-r--. 1 wmat wmat 849 Nov 26 04:55 HEADER
|
|
||||||
drwxrwxr-x. 5 wmat wmat 4096 Jan 31 13:44 lib
|
|
||||||
-rw-rw-r--. 1 wmat wmat 195 Nov 26 04:55 MANIFEST.in
|
|
||||||
-rw-rw-r--. 1 wmat wmat 2887 Nov 26 04:55 TODO
|
|
||||||
</literallayout>
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
At this point, you should have BitBake cloned to
|
|
||||||
a directory that matches the previous listing except for
|
|
||||||
dates and user names.
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='setting-up-the-bitbake-environment'>
|
|
||||||
<title>Setting Up the BitBake Environment</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
First, you need to be sure that you can run BitBake.
|
|
||||||
Set your working directory to where your local BitBake
|
|
||||||
files are and run the following command:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ ./bin/bitbake --version
|
|
||||||
BitBake Build Tool Core version 1.23.0, bitbake version 1.23.0
|
|
||||||
</literallayout>
|
|
||||||
The console output tells you what version you are running.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The recommended method to run BitBake is from a directory of your
|
|
||||||
choice.
|
|
||||||
To be able to run BitBake from any directory, you need to add the
directory containing the executable binary to your shell's environment
<filename>PATH</filename> variable.
|
|
||||||
First, look at your current <filename>PATH</filename> variable
|
|
||||||
by entering the following:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ echo $PATH
|
|
||||||
</literallayout>
|
|
||||||
Next, add the directory location for the BitBake binary to the
|
|
||||||
<filename>PATH</filename>.
|
|
||||||
Here is an example that adds the
|
|
||||||
<filename>/home/scott-lenovo/bitbake/bin</filename> directory
|
|
||||||
to the front of the <filename>PATH</filename> variable:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ export PATH=/home/scott-lenovo/bitbake/bin:$PATH
|
|
||||||
</literallayout>
|
|
||||||
You should now be able to enter the <filename>bitbake</filename>
|
|
||||||
command from the command line while working from any directory.
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='the-hello-world-example'>
|
|
||||||
<title>The Hello World Example</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The overall goal of this exercise is to build a
|
|
||||||
complete "Hello World" example utilizing task and layer
|
|
||||||
concepts.
|
|
||||||
Because this is how modern projects such as OpenEmbedded and
|
|
||||||
the Yocto Project utilize BitBake, the example
|
|
||||||
provides an excellent starting point for understanding
|
|
||||||
BitBake.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
To help you understand how to use BitBake to build targets,
|
|
||||||
the example starts with nothing but the <filename>bitbake</filename>
|
|
||||||
command, which causes BitBake to fail and report problems.
|
|
||||||
The example progresses by adding pieces to the build to
|
|
||||||
eventually conclude with a working, minimal "Hello World"
|
|
||||||
example.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
While every attempt is made to explain what is happening during
|
|
||||||
the example, the descriptions cannot cover everything.
|
|
||||||
You can find further information throughout this manual.
|
|
||||||
Also, you can actively participate in the
|
|
||||||
<ulink url='http://lists.openembedded.org/mailman/listinfo/bitbake-devel'></ulink>
|
|
||||||
discussion mailing list about the BitBake build tool.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<note>
|
|
||||||
This example was inspired by and drew heavily from these sources:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para>
|
|
||||||
<ulink url="http://www.mail-archive.com/yocto@yoctoproject.org/msg09379.html">Mailing List post - The BitBake equivalent of "Hello, World!"</ulink>
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
<ulink url="https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/">Hambedded Linux blog post - From Bitbake Hello World to an Image</ulink>
|
|
||||||
</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
</note>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
As stated earlier, the goal of this example
|
|
||||||
is to eventually compile "Hello World".
|
|
||||||
However, it is unknown what BitBake needs and what you have
|
|
||||||
to provide in order to achieve that goal.
|
|
||||||
Recall that BitBake utilizes three types of metadata files:
|
|
||||||
<link linkend='configuration-files'>Configuration Files</link>,
|
|
||||||
<link linkend='classes'>Classes</link>, and
|
|
||||||
<link linkend='recipes'>Recipes</link>.
|
|
||||||
But where do they go?
|
|
||||||
How does BitBake find them?
|
|
||||||
BitBake's error messaging helps you answer these types of questions
|
|
||||||
and helps you better understand exactly what is going on.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Following is the complete "Hello World" example.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<orderedlist>
|
|
||||||
<listitem><para><emphasis>Create a Project Directory:</emphasis>
|
|
||||||
First, set up a directory for the "Hello World" project.
|
|
||||||
Here is how you can do so in your home directory:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ mkdir ~/hello
|
|
||||||
$ cd ~/hello
|
|
||||||
</literallayout>
|
|
||||||
This is the directory that BitBake will use to do all of
|
|
||||||
its work.
|
|
||||||
You can use this directory to keep all the metafiles needed
|
|
||||||
by BitBake.
|
|
||||||
Having a project directory is a good way to isolate your
|
|
||||||
project.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>Run Bitbake:</emphasis>
|
|
||||||
At this point, you have nothing but a project directory.
|
|
||||||
Run the <filename>bitbake</filename> command and see what
|
|
||||||
it does:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ bitbake
|
|
||||||
The BBPATH variable is not set and bitbake did not
|
|
||||||
find a conf/bblayers.conf file in the expected location.
|
|
||||||
Maybe you accidentally invoked bitbake from the wrong directory?
|
|
||||||
DEBUG: Removed the following variables from the environment:
|
|
||||||
GNOME_DESKTOP_SESSION_ID, XDG_CURRENT_DESKTOP,
|
|
||||||
GNOME_KEYRING_CONTROL, DISPLAY, SSH_AGENT_PID, LANG, no_proxy,
|
|
||||||
XDG_SESSION_PATH, XAUTHORITY, SESSION_MANAGER, SHLVL,
|
|
||||||
MANDATORY_PATH, COMPIZ_CONFIG_PROFILE, WINDOWID, EDITOR,
|
|
||||||
GPG_AGENT_INFO, SSH_AUTH_SOCK, GDMSESSION, GNOME_KEYRING_PID,
|
|
||||||
XDG_SEAT_PATH, XDG_CONFIG_DIRS, LESSOPEN, DBUS_SESSION_BUS_ADDRESS,
|
|
||||||
_, XDG_SESSION_COOKIE, DESKTOP_SESSION, LESSCLOSE, DEFAULTS_PATH,
|
|
||||||
UBUNTU_MENUPROXY, OLDPWD, XDG_DATA_DIRS, COLORTERM, LS_COLORS
|
|
||||||
</literallayout>
|
|
||||||
The majority of this output is specific to environment variables
|
|
||||||
that are not directly relevant to BitBake.
|
|
||||||
However, the very first message regarding the
|
|
||||||
<filename>BBPATH</filename> variable and the
|
|
||||||
<filename>conf/bblayers.conf</filename> file
|
|
||||||
is relevant.</para>
|
|
||||||
<para>
|
|
||||||
When you run BitBake, it begins looking for metadata files.
|
|
||||||
The
|
|
||||||
<link linkend='var-BBPATH'><filename>BBPATH</filename></link>
|
|
||||||
variable is what tells BitBake where to look for those files.
|
|
||||||
<filename>BBPATH</filename> is not set and you need to set it.
|
|
||||||
Without <filename>BBPATH</filename>, Bitbake cannot
|
|
||||||
find any configuration files (<filename>.conf</filename>)
|
|
||||||
or recipe files (<filename>.bb</filename>) at all.
|
|
||||||
BitBake also cannot find the <filename>bitbake.conf</filename>
|
|
||||||
file.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>Setting <filename>BBPATH</filename>:</emphasis>
|
|
||||||
For this example, you can set <filename>BBPATH</filename>
|
|
||||||
in the same manner that you set <filename>PATH</filename>
|
|
||||||
earlier in the appendix.
|
|
||||||
You should realize, though, that it is much more flexible to set the
|
|
||||||
<filename>BBPATH</filename> variable up in a configuration
|
|
||||||
file for each project.</para>
|
|
||||||
<para>From your shell, enter the following commands to set and
|
|
||||||
export the <filename>BBPATH</filename> variable:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ BBPATH="<replaceable>projectdirectory</replaceable>"
|
|
||||||
$ export BBPATH
|
|
||||||
</literallayout>
|
|
||||||
Use your actual project directory in the command.
|
|
||||||
BitBake uses that directory to find the metadata it needs for
|
|
||||||
your project.
|
|
||||||
<note>
|
|
||||||
When specifying your project directory, do not use the
|
|
||||||
tilde ("~") character as BitBake does not expand that character
|
|
||||||
as the shell would.
|
|
||||||
</note>
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>Run Bitbake:</emphasis>
|
|
||||||
Now that you have <filename>BBPATH</filename> defined, run
|
|
||||||
the <filename>bitbake</filename> command again:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ bitbake
|
|
||||||
ERROR: Traceback (most recent call last):
|
|
||||||
File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 163, in wrapped
|
|
||||||
return func(fn, *args)
|
|
||||||
File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 173, in parse_config_file
|
|
||||||
return bb.parse.handle(fn, data, include)
|
|
||||||
File "/home/scott-lenovo/bitbake/lib/bb/parse/__init__.py", line 99, in handle
|
|
||||||
return h['handle'](fn, data, include)
|
|
||||||
File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/ConfHandler.py", line 120, in handle
|
|
||||||
abs_fn = resolve_file(fn, data)
|
|
||||||
File "/home/scott-lenovo/bitbake/lib/bb/parse/__init__.py", line 117, in resolve_file
|
|
||||||
raise IOError("file %s not found in %s" % (fn, bbpath))
|
|
||||||
IOError: file conf/bitbake.conf not found in /home/scott-lenovo/hello
|
|
||||||
|
|
||||||
ERROR: Unable to parse conf/bitbake.conf: file conf/bitbake.conf not found in /home/scott-lenovo/hello
|
|
||||||
</literallayout>
|
|
||||||
This sample output shows that BitBake could not find the
|
|
||||||
<filename>conf/bitbake.conf</filename> file in the project
|
|
||||||
directory.
|
|
||||||
This file is the first thing BitBake must find in order
|
|
||||||
to build a target.
|
|
||||||
And, since the project directory for this example is
|
|
||||||
empty, you need to provide a <filename>conf/bitbake.conf</filename>
|
|
||||||
file.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>Creating <filename>conf/bitbake.conf</filename>:</emphasis>
|
|
||||||
The <filename>conf/bitbake.conf</filename> file includes a number of
|
|
||||||
configuration variables BitBake uses for metadata and recipe
|
|
||||||
files.
|
|
||||||
For this example, you need to create the file in your project directory
|
|
||||||
and define some key BitBake variables.
|
|
||||||
For more information on the <filename>bitbake.conf</filename>,
|
|
||||||
see
|
|
||||||
<ulink url='https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#an-overview-of-bitbakeconf'></ulink>
|
|
||||||
</para>
|
|
||||||
<para>Use the following commands to create the <filename>conf</filename>
|
|
||||||
directory in the project directory:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ mkdir conf
|
|
||||||
</literallayout>
|
|
||||||
From within the <filename>conf</filename> directory, use
|
|
||||||
some editor to create the <filename>bitbake.conf</filename>
|
|
||||||
so that it contains the following:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
TMPDIR = "${<link linkend='var-TOPDIR'>TOPDIR</link>}/tmp"
|
|
||||||
<link linkend='var-CACHE'>CACHE</link> = "${TMPDIR}/cache"
|
|
||||||
<link linkend='var-STAMP'>STAMP</link> = "${TMPDIR}/stamps"
|
|
||||||
<link linkend='var-T'>T</link> = "${TMPDIR}/work"
|
|
||||||
<link linkend='var-B'>B</link> = "${TMPDIR}"
|
|
||||||
</literallayout>
|
|
||||||
The <filename>TMPDIR</filename> variable establishes a directory
|
|
||||||
that BitBake uses for build output and intermediate files (other
|
|
||||||
than the cached information used by the
|
|
||||||
<link linkend='setscene'>Setscene</link> process).
|
|
||||||
Here, the <filename>TMPDIR</filename> directory is set to
|
|
||||||
<filename>hello/tmp</filename>.
|
|
||||||
<note><title>Tip</title>
|
|
||||||
You can always safely delete the <filename>tmp</filename>
|
|
||||||
directory in order to rebuild a BitBake target.
|
|
||||||
The build process creates the directory for you
|
|
||||||
when you run BitBake.
|
|
||||||
</note></para>
|
|
||||||
<para>For information about each of the other variables defined in this
|
|
||||||
example, click on the links to take you to the definitions in
|
|
||||||
the glossary.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>Run Bitbake:</emphasis>
|
|
||||||
After making sure that the <filename>conf/bitbake.conf</filename>
|
|
||||||
file exists, you can run the <filename>bitbake</filename>
|
|
||||||
command again:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ bitbake
|
|
||||||
ERROR: Traceback (most recent call last):
|
|
||||||
File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 163, in wrapped
|
|
||||||
return func(fn, *args)
|
|
||||||
File "/home/scott-lenovo/bitbake/lib/bb/cookerdata.py", line 177, in _inherit
|
|
||||||
bb.parse.BBHandler.inherit(bbclass, "configuration INHERITs", 0, data)
|
|
||||||
File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/BBHandler.py", line 92, in inherit
|
|
||||||
include(fn, file, lineno, d, "inherit")
|
|
||||||
File "/home/scott-lenovo/bitbake/lib/bb/parse/parse_py/ConfHandler.py", line 100, in include
|
|
||||||
raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno)
|
|
||||||
ParseError: ParseError in configuration INHERITs: Could not inherit file classes/base.bbclass
|
|
||||||
|
|
||||||
ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inherit file classes/base.bbclass
|
|
||||||
</literallayout>
|
|
||||||
In the sample output, BitBake could not find the
|
|
||||||
<filename>classes/base.bbclass</filename> file.
|
|
||||||
You need to create that file next.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>Creating <filename>classes/base.bbclass</filename>:</emphasis>
|
|
||||||
BitBake uses class files to provide common code and functionality.
|
|
||||||
The minimally required class for BitBake is the
|
|
||||||
<filename>classes/base.bbclass</filename> file.
|
|
||||||
The <filename>base</filename> class is implicitly inherited by
|
|
||||||
every recipe.
|
|
||||||
BitBake looks for the class in the <filename>classes</filename>
|
|
||||||
directory of the project (i.e. <filename>hello/classes</filename>
|
|
||||||
in this example).
|
|
||||||
</para>
|
|
||||||
<para>Create the <filename>classes</filename> directory as follows:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ cd $HOME/hello
|
|
||||||
$ mkdir classes
|
|
||||||
</literallayout>
|
|
||||||
Move to the <filename>classes</filename> directory and then
|
|
||||||
create the <filename>base.bbclass</filename> file by inserting
|
|
||||||
this single line:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
addtask build
|
|
||||||
</literallayout>
|
|
||||||
The minimal task that BitBake runs is the
|
|
||||||
<filename>do_build</filename> task.
|
|
||||||
This is all the example needs in order to build the project.
|
|
||||||
Of course, the <filename>base.bbclass</filename> can have much
|
|
||||||
more depending on which build environments BitBake is
|
|
||||||
supporting.
|
|
||||||
For more information on the <filename>base.bbclass</filename> file,
|
|
||||||
you can look at
|
|
||||||
<ulink url='https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#tasks'></ulink>.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>Run Bitbake:</emphasis>
|
|
||||||
After making sure that the <filename>classes/base.bbclass</filename>
|
|
||||||
file exists, you can run the <filename>bitbake</filename>
|
|
||||||
command again:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ bitbake
|
|
||||||
Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.
|
|
||||||
</literallayout>
|
|
||||||
BitBake is finally reporting no errors.
|
|
||||||
However, you can see that it really does not have anything
|
|
||||||
to do.
|
|
||||||
You need to create a recipe that gives BitBake something to do.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>Creating a Layer:</emphasis>
|
|
||||||
While it is not really necessary for such a small example,
|
|
||||||
it is good practice to create a layer in which to keep your
|
|
||||||
code separate from the general metadata used by BitBake.
|
|
||||||
Thus, this example creates and uses a layer called "mylayer".
|
|
||||||
<note>
|
|
||||||
You can find additional information on adding a layer at
|
|
||||||
<ulink url='https://web.archive.org/web/20150325165911/http://hambedded.org/blog/2012/11/24/from-bitbake-hello-world-to-an-image/#adding-an-example-layer'></ulink>.
|
|
||||||
</note>
|
|
||||||
</para>
|
|
||||||
<para>Minimally, you need a recipe file and a layer configuration
|
|
||||||
file in your layer.
|
|
||||||
The configuration file needs to be in the <filename>conf</filename>
|
|
||||||
directory inside the layer.
|
|
||||||
Use these commands to set up the layer and the <filename>conf</filename>
|
|
||||||
directory:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ cd $HOME
|
|
||||||
$ mkdir mylayer
|
|
||||||
$ cd mylayer
|
|
||||||
$ mkdir conf
|
|
||||||
</literallayout>
|
|
||||||
Move to the <filename>conf</filename> directory and create a
|
|
||||||
<filename>layer.conf</filename> file that has the following:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
BBPATH .= ":${<link linkend='var-LAYERDIR'>LAYERDIR</link>}"
|
|
||||||
|
|
||||||
<link linkend='var-BBFILES'>BBFILES</link> += "${LAYERDIR}/*.bb"
|
|
||||||
|
|
||||||
<link linkend='var-BBFILE_COLLECTIONS'>BBFILE_COLLECTIONS</link> += "mylayer"
|
|
||||||
<link linkend='var-BBFILE_PATTERN'>BBFILE_PATTERN_mylayer</link> := "^${LAYERDIR_RE}/"
|
|
||||||
</literallayout>
|
|
||||||
For information on these variables, click the links
|
|
||||||
to go to the definitions in the glossary.</para>
|
|
||||||
<para>You need to create the recipe file next.
|
|
||||||
Inside your layer at the top-level, use an editor and create
|
|
||||||
a recipe file named <filename>printhello.bb</filename> that
|
|
||||||
has the following:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
<link linkend='var-DESCRIPTION'>DESCRIPTION</link> = "Prints Hello World"
|
|
||||||
<link linkend='var-PN'>PN</link> = 'printhello'
|
|
||||||
<link linkend='var-PV'>PV</link> = '1'
|
|
||||||
|
|
||||||
python do_build() {
|
|
||||||
bb.plain("********************");
|
|
||||||
bb.plain("* *");
|
|
||||||
bb.plain("* Hello, World! *");
|
|
||||||
bb.plain("* *");
|
|
||||||
bb.plain("********************");
|
|
||||||
}
|
|
||||||
</literallayout>
|
|
||||||
The recipe file simply provides a description of the
|
|
||||||
recipe, the name, version, and the <filename>do_build</filename>
|
|
||||||
task, which prints out "Hello World" to the console.
|
|
||||||
For more information on these variables, follow the links
|
|
||||||
to the glossary.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>Run Bitbake With a Target:</emphasis>
|
|
||||||
Now that a BitBake target exists, run the command and provide
|
|
||||||
that target:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ cd $HOME/hello
|
|
||||||
$ bitbake printhello
|
|
||||||
ERROR: no recipe files to build, check your BBPATH and BBFILES?
|
|
||||||
|
|
||||||
Summary: There was 1 ERROR message shown, returning a non-zero exit code.
|
|
||||||
</literallayout>
|
|
||||||
We have created the layer with the recipe and the layer
|
|
||||||
configuration file but it still seems that BitBake cannot
|
|
||||||
find the recipe.
|
|
||||||
BitBake needs a <filename>conf/bblayers.conf</filename> that
|
|
||||||
lists the layers for the project.
|
|
||||||
Without this file, BitBake cannot find the recipe.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>Creating <filename>conf/bblayers.conf</filename>:</emphasis>
|
|
||||||
BitBake uses the <filename>conf/bblayers.conf</filename> file
|
|
||||||
to locate layers needed for the project.
|
|
||||||
This file must reside in the <filename>conf</filename> directory
|
|
||||||
of the project (i.e. <filename>hello/conf</filename> for this
|
|
||||||
example).</para>
|
|
||||||
<para>Set your working directory to the <filename>hello/conf</filename>
|
|
||||||
directory and then create the <filename>bblayers.conf</filename>
|
|
||||||
file so that it contains the following:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
BBLAYERS ?= " \
|
|
||||||
/home/<replaceable>you</replaceable>/mylayer \
|
|
||||||
"
|
|
||||||
</literallayout>
|
|
||||||
You need to provide your own information for
|
|
||||||
<filename>you</filename> in the file.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>Run Bitbake With a Target:</emphasis>
|
|
||||||
Now that you have supplied the <filename>bblayers.conf</filename>
|
|
||||||
file, run the <filename>bitbake</filename> command and provide
|
|
||||||
the target:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ bitbake printhello
|
|
||||||
Parsing recipes: 100% |##################################################################################|
|
|
||||||
Time: 00:00:00
|
|
||||||
Parsing of 1 .bb files complete (0 cached, 1 parsed). 1 targets, 0 skipped, 0 masked, 0 errors.
|
|
||||||
NOTE: Resolving any missing task queue dependencies
|
|
||||||
NOTE: Preparing RunQueue
|
|
||||||
NOTE: Executing RunQueue Tasks
|
|
||||||
********************
|
|
||||||
* *
|
|
||||||
* Hello, World! *
|
|
||||||
* *
|
|
||||||
********************
|
|
||||||
NOTE: Tasks Summary: Attempted 1 tasks of which 0 didn't need to be rerun and all succeeded.
|
|
||||||
</literallayout>
|
|
||||||
BitBake finds the <filename>printhello</filename> recipe and
|
|
||||||
successfully runs the task.
|
|
||||||
<note>
|
|
||||||
After the first execution, re-running
|
|
||||||
<filename>bitbake printhello</filename> again will not
|
|
||||||
result in a BitBake run that prints the same console
|
|
||||||
output.
|
|
||||||
The reason for this is that the first time the
|
|
||||||
<filename>printhello.bb</filename> recipe's
|
|
||||||
<filename>do_build</filename> task executes
|
|
||||||
successfully, BitBake writes a stamp file for the task.
|
|
||||||
Thus, the next time you attempt to run the task
|
|
||||||
using that same <filename>bitbake</filename> command,
|
|
||||||
BitBake notices the stamp and therefore determines
|
|
||||||
that the task does not need to be re-run.
|
|
||||||
If you delete the <filename>tmp</filename> directory
|
|
||||||
or run <filename>bitbake -c clean printhello</filename>
|
|
||||||
and then re-run the build, the "Hello, World!" message will
|
|
||||||
be printed again.
|
|
||||||
</note>
|
|
||||||
</para></listitem>
|
|
||||||
</orderedlist>
|
|
||||||
</section>
|
|
||||||
</appendix>
|
|
||||||
<!DOCTYPE chapter PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
|
||||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
|
||||||
|
|
||||||
<chapter id="bitbake-user-manual-intro">
|
|
||||||
<title>Overview</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Welcome to the BitBake User Manual.
|
|
||||||
This manual provides information on the BitBake tool.
|
|
||||||
The information attempts to be as independent as possible regarding
|
|
||||||
systems that use BitBake, such as OpenEmbedded and the
|
|
||||||
Yocto Project.
|
|
||||||
In some cases, scenarios or examples within the context of
|
|
||||||
a build system are used in the manual to help with understanding.
|
|
||||||
For these cases, the manual clearly states the context.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<section id="intro">
|
|
||||||
<title>Introduction</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Fundamentally, BitBake is a generic task execution
|
|
||||||
engine that allows shell and Python tasks to be run
|
|
||||||
efficiently and in parallel while working within
|
|
||||||
complex inter-task dependency constraints.
|
|
||||||
One of BitBake's main users, OpenEmbedded, takes this core
|
|
||||||
and builds embedded Linux software stacks using
|
|
||||||
a task-oriented approach.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Conceptually, BitBake is similar to GNU Make in
|
|
||||||
some regards but has significant differences:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para>
|
|
||||||
BitBake executes tasks according to provided
|
|
||||||
metadata that builds up the tasks.
|
|
||||||
Metadata is stored in recipe (<filename>.bb</filename>)
|
|
||||||
and related recipe "append" (<filename>.bbappend</filename>)
|
|
||||||
files, configuration (<filename>.conf</filename>) and
|
|
||||||
underlying include (<filename>.inc</filename>) files, and
|
|
||||||
in class (<filename>.bbclass</filename>) files.
|
|
||||||
The metadata provides
|
|
||||||
BitBake with instructions on what tasks to run and
|
|
||||||
the dependencies between those tasks.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
BitBake includes a fetcher library for obtaining source
|
|
||||||
code from various places such as local files, source control
|
|
||||||
systems, or websites.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
The instructions for each unit to be built (e.g. a piece
|
|
||||||
of software) are known as "recipe" files and
|
|
||||||
contain all the information about the unit
|
|
||||||
(dependencies, source file locations, checksums, description
|
|
||||||
and so on).
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
BitBake includes a client/server abstraction and can
|
|
||||||
be used from a command line or used as a service over
|
|
||||||
XML-RPC and has several different user interfaces.
|
|
||||||
</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id="history-and-goals">
|
|
||||||
<title>History and Goals</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
BitBake was originally a part of the OpenEmbedded project.
|
|
||||||
It was inspired by the Portage package management system
|
|
||||||
used by the Gentoo Linux distribution.
|
|
||||||
On December 7, 2004, OpenEmbedded project team member
|
|
||||||
Chris Larson split the project into two distinct pieces:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para>BitBake, a generic task executor</para></listitem>
|
|
||||||
<listitem><para>OpenEmbedded, a metadata set utilized by
|
|
||||||
BitBake</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
Today, BitBake is the primary basis of the
|
|
||||||
<ulink url="http://www.openembedded.org/">OpenEmbedded</ulink>
|
|
||||||
project, which is being used to build and maintain Linux
|
|
||||||
distributions such as the
|
|
||||||
<ulink url='http://www.angstrom-distribution.org/'>Angstrom Distribution</ulink>,
|
|
||||||
and which is also being used as the build tool for Linux projects
|
|
||||||
such as the
|
|
||||||
<ulink url='http://www.yoctoproject.org'>Yocto Project</ulink>.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Prior to BitBake, no other build tool adequately met the needs of
|
|
||||||
an aspiring embedded Linux distribution.
|
|
||||||
All of the build systems used by traditional desktop Linux
|
|
||||||
distributions lacked important functionality, and none of the
|
|
||||||
ad hoc Buildroot-based systems, prevalent in the
|
|
||||||
embedded space, were scalable or maintainable.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Some important original goals for BitBake were:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para>
|
|
||||||
Handle cross-compilation.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
Handle inter-package dependencies (build time on
|
|
||||||
target architecture, build time on native
|
|
||||||
architecture, and runtime).
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
Support running any number of tasks within a given
|
|
||||||
package, including, but not limited to, fetching
|
|
||||||
upstream sources, unpacking them, patching them,
|
|
||||||
configuring them, and so forth.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
Be Linux distribution agnostic for both build and
|
|
||||||
target systems.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
Be architecture agnostic.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
Support multiple build and target operating systems
|
|
||||||
(e.g. Cygwin, the BSDs, and so forth).
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
Be self contained, rather than tightly
|
|
||||||
integrated into the build machine's root
|
|
||||||
filesystem.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
Handle conditional metadata on the target architecture,
|
|
||||||
operating system, distribution, and machine.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
Make it easy to use the tools to supply local metadata and packages
|
|
||||||
against which to operate.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
Make it easy to use BitBake to collaborate between multiple
|
|
||||||
projects for their builds.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
Provide an inheritance mechanism to share
|
|
||||||
common metadata between many packages.
|
|
||||||
</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
Over time it became apparent that some further requirements
|
|
||||||
were necessary:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para>
|
|
||||||
Handle variants of a base recipe (e.g. native, sdk,
|
|
||||||
and multilib).
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
Split metadata into layers and allow layers
|
|
||||||
to enhance or override other layers.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
Allow representation of a given set of input variables
|
|
||||||
to a task as a checksum.
|
|
||||||
Based on that checksum, allow acceleration of builds
|
|
||||||
with prebuilt components.
|
|
||||||
</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
BitBake satisfies all the original requirements and many more
|
|
||||||
with extensions being made to the basic functionality to
|
|
||||||
reflect the additional requirements.
|
|
||||||
Flexibility and power have always been the priorities.
|
|
||||||
BitBake is highly extensible and supports embedded Python code and
|
|
||||||
execution of any arbitrary tasks.
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id="Concepts">
|
|
||||||
<title>Concepts</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
BitBake is a program written in the Python language.
|
|
||||||
At the highest level, BitBake interprets metadata, decides
|
|
||||||
what tasks are required to run, and executes those tasks.
|
|
||||||
Similar to GNU Make, BitBake controls how software is
|
|
||||||
built.
|
|
||||||
GNU Make achieves its control through "makefiles", while
|
|
||||||
BitBake uses "recipes".
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
BitBake extends the capabilities of a simple
|
|
||||||
tool like GNU Make by allowing for the definition of much more
|
|
||||||
complex tasks, such as assembling entire embedded Linux
|
|
||||||
distributions.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The remainder of this section introduces several concepts
|
|
||||||
that should be understood in order to better leverage
|
|
||||||
the power of BitBake.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<section id='recipes'>
|
|
||||||
<title>Recipes</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
BitBake Recipes, which are denoted by the file extension
|
|
||||||
<filename>.bb</filename>, are the most basic metadata files.
|
|
||||||
These recipe files provide BitBake with the following:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para>Descriptive information about the
|
|
||||||
package (author, homepage, license, and so on)</para></listitem>
|
|
||||||
<listitem><para>The version of the recipe</para></listitem>
|
|
||||||
<listitem><para>Existing dependencies (both build
|
|
||||||
and runtime dependencies)</para></listitem>
|
|
||||||
<listitem><para>Where the source code resides and
|
|
||||||
how to fetch it</para></listitem>
|
|
||||||
<listitem><para>Whether the source code requires
|
|
||||||
any patches, where to find them, and how to apply
|
|
||||||
them</para></listitem>
|
|
||||||
<listitem><para>How to configure and compile the
|
|
||||||
source code</para></listitem>
|
|
||||||
<listitem><para>Where on the target machine to install the
|
|
||||||
package or packages created</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
</para>
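<para>
    For illustration only, here is a rough sketch of what such a recipe
    might look like. The recipe name, variable values, and URL are
    hypothetical and not taken from any real layer:
    <literallayout class='monospaced'>
     # hello_1.0.bb - hypothetical example recipe
     DESCRIPTION = "Example hello application"
     HOMEPAGE = "http://example.com/hello"
     LICENSE = "MIT"
     DEPENDS = "zlib"

     SRC_URI = "http://example.com/downloads/hello-${PV}.tar.gz"

     # checksums, patches, and task overrides would follow here
    </literallayout>
    The variable names shown (<filename>DESCRIPTION</filename>,
    <filename>LICENSE</filename>, <filename>DEPENDS</filename>,
    <filename>SRC_URI</filename>, and so forth) are standard BitBake
    metadata; the values are placeholders.
</para>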
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Within the context of BitBake, or any project utilizing BitBake
|
|
||||||
as its build system, files with the <filename>.bb</filename>
|
|
||||||
extension are referred to as recipes.
|
|
||||||
<note>
|
|
||||||
The term "package" is also commonly used to describe recipes.
|
|
||||||
However, since the same word is used to describe packaged
|
|
||||||
output from a project, it is best to maintain a single
|
|
||||||
descriptive term - "recipes".
|
|
||||||
Put another way, a single "recipe" file is quite capable
|
|
||||||
of generating a number of related but separately installable
|
|
||||||
"packages".
|
|
||||||
In fact, that ability is fairly common.
|
|
||||||
</note>
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='configuration-files'>
|
|
||||||
<title>Configuration Files</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Configuration files, which are denoted by the
|
|
||||||
<filename>.conf</filename> extension, define
|
|
||||||
various configuration variables that govern the project's build
|
|
||||||
process.
|
|
||||||
These files fall into several areas that define
|
|
||||||
machine configuration options, distribution configuration
|
|
||||||
options, compiler tuning options, general common
|
|
||||||
configuration options, and user configuration options.
|
|
||||||
The main configuration file is the sample
|
|
||||||
<filename>bitbake.conf</filename> file, which is
|
|
||||||
located within the BitBake source tree
|
|
||||||
<filename>conf</filename> directory.
|
|
||||||
</para>
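<para>
    As a minimal sketch, a user configuration file might simply assign
    a few variables. The values below are hypothetical:
    <literallayout class='monospaced'>
     # fragment of a hypothetical .conf file
     BB_NUMBER_THREADS = "4"
     DL_DIR ?= "${TOPDIR}/downloads"
    </literallayout>
    A hard assignment ("=") always sets the value, while the weak
    default ("?=") applies only if nothing else has set the variable.
</para>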
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='classes'>
|
|
||||||
<title>Classes</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Class files, which are denoted by the
|
|
||||||
<filename>.bbclass</filename> extension, contain
|
|
||||||
information that is useful to share between metadata files.
|
|
||||||
The BitBake source tree currently comes with one class metadata file
|
|
||||||
called <filename>base.bbclass</filename>.
|
|
||||||
You can find this file in the
|
|
||||||
<filename>classes</filename> directory.
|
|
||||||
The <filename>base.bbclass</filename> class file is special since it
|
|
||||||
is always included automatically for all recipes
|
|
||||||
and classes.
|
|
||||||
This class contains definitions for standard basic tasks such
|
|
||||||
as fetching, unpacking, configuring (empty by default),
|
|
||||||
compiling (runs any Makefile present), installing (empty by
|
|
||||||
default) and packaging (empty by default).
|
|
||||||
These tasks are often overridden or extended by other classes
|
|
||||||
added during the project development process.
|
|
||||||
</para>
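<para>
    As a rough, hypothetical sketch of the mechanism, a class can
    define a task and a recipe can pull the class in with
    <filename>inherit</filename>. The class and task names below are
    invented purely for illustration; only
    <filename>base.bbclass</filename> ships with BitBake itself:
    <literallayout class='monospaced'>
     # classes/report.bbclass - hypothetical class
     do_report() {
         echo "built ${PN}"
     }
     addtask report after do_build

     # in a recipe (.bb) file
     inherit report
    </literallayout>
</para>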
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='layers'>
|
|
||||||
<title>Layers</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Layers allow you to isolate different types of
|
|
||||||
customizations from each other.
|
|
||||||
While you might find it tempting to keep everything in one layer
|
|
||||||
when working on a single project, the more modular you organize
|
|
||||||
your metadata, the easier it is to cope with future changes.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
To illustrate how you can use layers to keep things modular,
|
|
||||||
consider customizations you might make to support a specific target machine.
|
|
||||||
These types of customizations typically reside in a special layer,
|
|
||||||
rather than a general layer, called a Board Support Package (BSP)
|
|
||||||
Layer.
|
|
||||||
Furthermore, the machine customizations should be isolated from
|
|
||||||
recipes and metadata that support a new GUI environment, for
|
|
||||||
example.
|
|
||||||
This situation gives you a couple of layers: one for the machine
|
|
||||||
configurations and one for the GUI environment.
|
|
||||||
It is important to understand, however, that the BSP layer can still
|
|
||||||
make machine-specific additions to recipes within
|
|
||||||
the GUI environment layer without polluting the GUI layer itself
|
|
||||||
with those machine-specific changes.
|
|
||||||
You can accomplish this through a recipe that is a BitBake append
|
|
||||||
(<filename>.bbappend</filename>) file.
|
|
||||||
</para>
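<para>
    Each layer is typically described by a
    <filename>conf/layer.conf</filename> file. The following is a
    minimal sketch with an invented layer name
    ("examplelayer"); the variables shown are the standard ones BitBake
    uses to locate a layer's recipes:
    <literallayout class='monospaced'>
     # conf/layer.conf - hypothetical layer configuration
     BBPATH .= ":${LAYERDIR}"
     BBFILES += "${LAYERDIR}/recipes-*/*/*.bb \
                 ${LAYERDIR}/recipes-*/*/*.bbappend"
     BBFILE_COLLECTIONS += "examplelayer"
     BBFILE_PATTERN_examplelayer = "^${LAYERDIR}/"
    </literallayout>
</para>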
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='append-bbappend-files'>
|
|
||||||
<title>Append Files</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Append files, which are files that have the
|
|
||||||
<filename>.bbappend</filename> file extension, extend or
|
|
||||||
override information in an existing recipe file.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
BitBake expects every append file to have a corresponding recipe file.
|
|
||||||
Furthermore, the append file and corresponding recipe file
|
|
||||||
must use the same root filename.
|
|
||||||
The filenames can differ only in the file type suffix used
|
|
||||||
(e.g. <filename>formfactor_0.0.bb</filename> and
|
|
||||||
<filename>formfactor_0.0.bbappend</filename>).
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Information in append files extends or
|
|
||||||
overrides the information in the underlying,
|
|
||||||
similarly-named recipe files.
|
|
||||||
</para>
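<para>
    For example, a hypothetical append file for the
    <filename>formfactor_0.0.bb</filename> recipe mentioned above might
    add an extra source file and override a variable. The contents are
    invented for illustration; the point is that these assignments are
    applied on top of the original recipe:
    <literallayout class='monospaced'>
     # formfactor_0.0.bbappend - hypothetical append file
     DESCRIPTION = "Formfactor data tuned for our machine"
     SRC_URI += "file://machconfig"
    </literallayout>
</para>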
|
|
||||||
|
|
||||||
<para>
|
|
||||||
When you name an append file, you can use the
|
|
||||||
wildcard character (%) to allow for matching recipe names.
|
|
||||||
For example, suppose you have an append file named
|
|
||||||
as follows:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
busybox_1.21.%.bbappend
|
|
||||||
</literallayout>
|
|
||||||
That append file would match any <filename>busybox_1.21.x.bb</filename>
|
|
||||||
version of the recipe.
|
|
||||||
So, the append file would match the following recipe names:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
busybox_1.21.1.bb
|
|
||||||
busybox_1.21.2.bb
|
|
||||||
busybox_1.21.3.bb
|
|
||||||
</literallayout>
|
|
||||||
If the <filename>busybox</filename> recipe was updated to
|
|
||||||
<filename>busybox_1.3.0.bb</filename>, the append name would not
|
|
||||||
match.
|
|
||||||
However, if you named the append file
|
|
||||||
<filename>busybox_1.%.bbappend</filename>, then you would have a match.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
In the most general case, you could name the append file something as
|
|
||||||
simple as <filename>busybox_%.bbappend</filename> to be entirely
|
|
||||||
version independent.
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='obtaining-bitbake'>
|
|
||||||
<title>Obtaining BitBake</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
You can obtain BitBake in several different ways:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para><emphasis>Cloning BitBake:</emphasis>
|
|
||||||
Using Git to clone the BitBake source code repository
|
|
||||||
is the recommended method for obtaining BitBake.
|
|
||||||
Cloning the repository makes it easy to get bug fixes
|
|
||||||
and have access to stable branches and the master
|
|
||||||
branch.
|
|
||||||
Once you have cloned BitBake, you should use
|
|
||||||
the latest stable
|
|
||||||
branch for development since the master branch is for
|
|
||||||
BitBake development and might contain less stable changes.
|
|
||||||
</para>
|
|
||||||
<para>You usually need a version of BitBake
|
|
||||||
that matches the metadata you are using.
|
|
||||||
The metadata is generally backwards compatible but
|
|
||||||
not forward compatible.</para>
|
|
||||||
<para>Here is an example that clones the BitBake repository:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ git clone git://git.openembedded.org/bitbake
|
|
||||||
</literallayout>
|
|
||||||
This command clones the BitBake Git repository into a
|
|
||||||
directory called <filename>bitbake</filename>.
|
|
||||||
Alternatively, you can
|
|
||||||
designate a directory after the
|
|
||||||
<filename>git clone</filename> command
|
|
||||||
if you want to call the new directory something
|
|
||||||
other than <filename>bitbake</filename>.
|
|
||||||
Here is an example that names the directory
|
|
||||||
<filename>bbdev</filename>:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ git clone git://git.openembedded.org/bitbake bbdev
|
|
||||||
</literallayout></para></listitem>
|
|
||||||
<listitem><para><emphasis>Installation using your Distribution
|
|
||||||
Package Management System:</emphasis>
|
|
||||||
This method is not
|
|
||||||
recommended because the BitBake version that is
|
|
||||||
provided by your distribution, in most cases,
|
|
||||||
is several
|
|
||||||
releases behind a snapshot of the BitBake repository.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>Taking a snapshot of BitBake:</emphasis>
|
|
||||||
Downloading a snapshot of BitBake from the
|
|
||||||
source code repository gives you access to a known
|
|
||||||
branch or release of BitBake.
|
|
||||||
<note>
|
|
||||||
Cloning the Git repository, as described earlier,
|
|
||||||
is the preferred method for getting BitBake.
|
|
||||||
Cloning the repository makes it easier to update as
|
|
||||||
patches are added to the stable branches.
|
|
||||||
</note></para>
|
|
||||||
<para>The following example downloads a snapshot of
|
|
||||||
BitBake version 1.17.0:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ wget http://git.openembedded.org/bitbake/snapshot/bitbake-1.17.0.tar.gz
|
|
||||||
$ tar zxpvf bitbake-1.17.0.tar.gz
|
|
||||||
</literallayout>
|
|
||||||
After extraction of the tarball using the tar utility,
|
|
||||||
you have a directory entitled
|
|
||||||
<filename>bitbake-1.17.0</filename>.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para><emphasis>Using the BitBake that Comes With Your
|
|
||||||
Build Checkout:</emphasis>
|
|
||||||
A final possibility for getting a copy of BitBake is that it
|
|
||||||
already comes with your checkout of a larger BitBake-based build
|
|
||||||
system, such as Poky or Yocto Project.
|
|
||||||
Rather than manually checking out individual layers and
|
|
||||||
gluing them together yourself, you can check
|
|
||||||
out an entire build system.
|
|
||||||
The checkout will already include a version of BitBake that
|
|
||||||
has been thoroughly tested for compatibility with the other
|
|
||||||
components.
|
|
||||||
For information on how to check out a particular BitBake-based
|
|
||||||
build system, consult that build system's supporting documentation.
|
|
||||||
</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id="bitbake-user-manual-command">
|
|
||||||
<title>The BitBake Command</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The <filename>bitbake</filename> command is the primary interface
|
|
||||||
to the BitBake tool.
|
|
||||||
This section presents the BitBake command syntax and provides
|
|
||||||
several execution examples.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<section id='usage-and-syntax'>
|
|
||||||
<title>Usage and syntax</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Following is the usage and syntax for BitBake:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ bitbake -h
|
|
||||||
Usage: bitbake [options] [recipename/target recipe:do_task ...]
|
|
||||||
|
|
||||||
Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
|
|
||||||
It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
|
|
||||||
will provide the layer, BBFILES and other configuration information.
|
|
||||||
|
|
||||||
Options:
|
|
||||||
--version show program's version number and exit
|
|
||||||
-h, --help show this help message and exit
|
|
||||||
-b BUILDFILE, --buildfile=BUILDFILE
|
|
||||||
Execute tasks from a specific .bb recipe directly.
|
|
||||||
WARNING: Does not handle any dependencies from other
|
|
||||||
recipes.
|
|
||||||
-k, --continue Continue as much as possible after an error. While the
|
|
||||||
target that failed and anything depending on it cannot
|
|
||||||
be built, as much as possible will be built before
|
|
||||||
stopping.
|
|
||||||
-a, --tryaltconfigs Continue with builds by trying to use alternative
|
|
||||||
providers where possible.
|
|
||||||
-f, --force Force the specified targets/task to run (invalidating
|
|
||||||
any existing stamp file).
|
|
||||||
-c CMD, --cmd=CMD Specify the task to execute. The exact options
|
|
||||||
available depend on the metadata. Some examples might
|
|
||||||
be 'compile' or 'populate_sysroot' or 'listtasks' may
|
|
||||||
give a list of the tasks available.
|
|
||||||
-C INVALIDATE_STAMP, --clear-stamp=INVALIDATE_STAMP
|
|
||||||
Invalidate the stamp for the specified task such as
|
|
||||||
'compile' and then run the default task for the
|
|
||||||
specified target(s).
|
|
||||||
-r PREFILE, --read=PREFILE
|
|
||||||
Read the specified file before bitbake.conf.
|
|
||||||
-R POSTFILE, --postread=POSTFILE
|
|
||||||
Read the specified file after bitbake.conf.
|
|
||||||
-v, --verbose Enable tracing of shell tasks (with 'set -x').
|
|
||||||
Also print bb.note(...) messages to stdout (in
|
|
||||||
addition to writing them to ${T}/log.do_<task>).
|
|
||||||
-D, --debug Increase the debug level. You can specify this
|
|
||||||
more than once. -D sets the debug level to 1,
|
|
||||||
where only bb.debug(1, ...) messages are printed
|
|
||||||
to stdout; -DD sets the debug level to 2, where
|
|
||||||
both bb.debug(1, ...) and bb.debug(2, ...)
|
|
||||||
messages are printed; etc. Without -D, no debug
|
|
||||||
messages are printed. Note that -D only affects
|
|
||||||
output to stdout. All debug messages are written
|
|
||||||
to ${T}/log.do_taskname, regardless of the debug
|
|
||||||
level.
|
|
||||||
-n, --dry-run Don't execute, just go through the motions.
|
|
||||||
-S SIGNATURE_HANDLER, --dump-signatures=SIGNATURE_HANDLER
|
|
||||||
Dump out the signature construction information, with
|
|
||||||
no task execution. The SIGNATURE_HANDLER parameter is
|
|
||||||
passed to the handler. Two common values are none and
|
|
||||||
printdiff but the handler may define more/less. none
|
|
||||||
means only dump the signature, printdiff means compare
|
|
||||||
the dumped signature with the cached one.
|
|
||||||
-p, --parse-only Quit after parsing the BB recipes.
|
|
||||||
-s, --show-versions Show current and preferred versions of all recipes.
|
|
||||||
-e, --environment Show the global or per-recipe environment complete
|
|
||||||
with information about where variables were
|
|
||||||
set/changed.
|
|
||||||
-g, --graphviz Save dependency tree information for the specified
|
|
||||||
targets in the dot syntax.
|
|
||||||
-I EXTRA_ASSUME_PROVIDED, --ignore-deps=EXTRA_ASSUME_PROVIDED
|
|
||||||
Assume these dependencies don't exist and are already
|
|
||||||
provided (equivalent to ASSUME_PROVIDED). Useful to
|
|
||||||
make dependency graphs more appealing
|
|
||||||
-l DEBUG_DOMAINS, --log-domains=DEBUG_DOMAINS
|
|
||||||
Show debug logging for the specified logging domains
|
|
||||||
-P, --profile Profile the command and save reports.
|
|
||||||
-u UI, --ui=UI The user interface to use (taskexp, knotty or
|
|
||||||
ncurses - default knotty).
|
|
||||||
-t SERVERTYPE, --servertype=SERVERTYPE
|
|
||||||
Choose which server type to use (process or xmlrpc -
|
|
||||||
default process).
|
|
||||||
--token=XMLRPCTOKEN Specify the connection token to be used when
|
|
||||||
connecting to a remote server.
|
|
||||||
--revisions-changed Set the exit code depending on whether upstream
|
|
||||||
floating revisions have changed or not.
|
|
||||||
--server-only Run bitbake without a UI, only starting a server
|
|
||||||
(cooker) process.
|
|
||||||
-B BIND, --bind=BIND The name/address for the bitbake server to bind to.
|
|
||||||
--no-setscene Do not run any setscene tasks. sstate will be ignored
|
|
||||||
and everything needed, built.
|
|
||||||
--setscene-only Only run setscene tasks, don't run any real tasks.
|
|
||||||
--remote-server=REMOTE_SERVER
|
|
||||||
Connect to the specified server.
|
|
||||||
-m, --kill-server Terminate the remote server.
|
|
||||||
--observe-only Connect to a server as an observing-only client.
|
|
||||||
--status-only Check the status of the remote bitbake server.
|
|
||||||
-w WRITEEVENTLOG, --write-log=WRITEEVENTLOG
|
|
||||||
Writes the event log of the build to a bitbake event
|
|
||||||
json file. Use '' (empty string) to assign the name
|
|
||||||
automatically.
|
|
||||||
</literallayout>
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='bitbake-examples'>
|
|
||||||
<title>Examples</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
This section presents some examples showing how to use BitBake.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<section id='example-executing-a-task-against-a-single-recipe'>
|
|
||||||
<title>Executing a Task Against a Single Recipe</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Executing tasks for a single recipe file is relatively simple.
|
|
||||||
You specify the file in question, and BitBake parses
|
|
||||||
it and executes the specified task.
|
|
||||||
If you do not specify a task, BitBake executes the default
|
|
||||||
task, which is "build".
|
|
||||||
BitBake obeys inter-task dependencies when doing
|
|
||||||
so.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The following command runs the build task, which is
|
|
||||||
the default task, on the <filename>foo_1.0.bb</filename>
|
|
||||||
recipe file:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ bitbake -b foo_1.0.bb
|
|
||||||
</literallayout>
|
|
||||||
The following command runs the clean task on the
|
|
||||||
<filename>foo.bb</filename> recipe file:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ bitbake -b foo.bb -c clean
|
|
||||||
</literallayout>
|
|
||||||
<note>
|
|
||||||
The "-b" option explicitly does not handle recipe
|
|
||||||
dependencies.
|
|
||||||
Other than for debugging purposes, it is instead
|
|
||||||
recommended that you use the syntax presented in the
|
|
||||||
next section.
|
|
||||||
</note>
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='executing-tasks-against-a-set-of-recipe-files'>
|
|
||||||
<title>Executing Tasks Against a Set of Recipe Files</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
There are a number of additional complexities introduced
|
|
||||||
when one wants to manage multiple <filename>.bb</filename>
|
|
||||||
files.
|
|
||||||
Clearly there needs to be a way to tell BitBake what
|
|
||||||
files are available and, of those, which you
|
|
||||||
want to execute.
|
|
||||||
There also needs to be a way for each recipe
|
|
||||||
to express its dependencies, both for build-time and
|
|
||||||
runtime.
|
|
||||||
There must be a way for you to express recipe preferences
|
|
||||||
when multiple recipes provide the same functionality, or when
|
|
||||||
there are multiple versions of a recipe.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The <filename>bitbake</filename> command, when not using
|
|
||||||
"--buildfile" or "-b" only accepts a "PROVIDES".
|
|
||||||
You cannot provide anything else.
|
|
||||||
By default, a recipe file generally "PROVIDES" its
|
|
||||||
"packagename" as shown in the following example:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ bitbake foo
|
|
||||||
</literallayout>
|
|
||||||
This next example "PROVIDES" the package name and also uses
|
|
||||||
the "-c" option to tell BitBake to just execute the
|
|
||||||
<filename>do_clean</filename> task:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ bitbake -c clean foo
|
|
||||||
</literallayout>
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='executing-a-list-of-task-and-recipe-combinations'>
|
|
||||||
<title>Executing a List of Task and Recipe Combinations</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
The BitBake command line supports specifying different
|
|
||||||
tasks for individual targets when you specify multiple
|
|
||||||
targets.
|
|
||||||
For example, suppose you had two targets (or recipes)
|
|
||||||
<filename>myfirstrecipe</filename> and
|
|
||||||
<filename>mysecondrecipe</filename> and you needed
|
|
||||||
BitBake to run <filename>taskA</filename> for the first
|
|
||||||
recipe and <filename>taskB</filename> for the second
|
|
||||||
recipe:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ bitbake myfirstrecipe:do_taskA mysecondrecipe:do_taskB
|
|
||||||
</literallayout>
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
|
|
||||||
<section id='generating-dependency-graphs'>
|
|
||||||
<title>Generating Dependency Graphs</title>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
BitBake is able to generate dependency graphs using
|
|
||||||
the <filename>dot</filename> syntax.
|
|
||||||
You can convert these graphs into images using the
|
|
||||||
<filename>dot</filename> tool from
|
|
||||||
<ulink url='http://www.graphviz.org'>Graphviz</ulink>.
|
|
||||||
</para>
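<para>
    As a sketch, assuming the Graphviz <filename>dot</filename> tool is
    installed and a target named <filename>foo</filename> exists, the
    generated graph can be rendered to an image as follows:
    <literallayout class='monospaced'>
     $ bitbake -g foo
     $ dot -Tpng task-depends.dot -o task-depends.png
    </literallayout>
</para>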
|
|
||||||
|
|
||||||
<para>
|
|
||||||
When you generate a dependency graph, BitBake writes three files
|
|
||||||
to the current working directory:
|
|
||||||
<itemizedlist>
|
|
||||||
<listitem><para>
|
|
||||||
<emphasis><filename>recipe-depends.dot</filename>:</emphasis>
|
|
||||||
Shows dependencies between recipes (i.e. a collapsed version of
|
|
||||||
<filename>task-depends.dot</filename>).
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
<emphasis><filename>task-depends.dot</filename>:</emphasis>
|
|
||||||
Shows dependencies between tasks.
|
|
||||||
These dependencies match BitBake's internal task execution list.
|
|
||||||
</para></listitem>
|
|
||||||
<listitem><para>
|
|
||||||
<emphasis><filename>pn-buildlist</filename>:</emphasis>
|
|
||||||
Shows a simple list of targets that are to be built.
|
|
||||||
</para></listitem>
|
|
||||||
</itemizedlist>
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
To omit dependencies that are common to most recipes, use the "-I"
|
|
||||||
option; BitBake then leaves them out of the graph.
|
|
||||||
Leaving this information out can produce more readable graphs.
|
|
||||||
This way, you can remove from the graph
|
|
||||||
<filename>DEPENDS</filename> from inherited classes
|
|
||||||
such as <filename>base.bbclass</filename>.
|
|
||||||
</para>
|
|
||||||
|
|
||||||
<para>
|
|
||||||
Here are two examples that create dependency graphs.
|
|
||||||
The second example omits depends common in OpenEmbedded from
|
|
||||||
the graph:
|
|
||||||
<literallayout class='monospaced'>
|
|
||||||
$ bitbake -g foo
|
|
||||||
|
|
||||||
$ bitbake -g -I virtual/kernel -I eglibc foo
|
|
||||||
</literallayout>
|
|
||||||
</para>
|
|
||||||
</section>
|
|
||||||
</section>
|
|
||||||
</section>
|
|
||||||
</chapter>
|
|
File diff suppressed because it is too large
File diff suppressed because it is too large
|
@ -1,984 +0,0 @@
|
||||||
/*
|
|
||||||
Generic XHTML / DocBook XHTML CSS Stylesheet.
|
|
||||||
|
|
||||||
Browser wrangling and typographic design by
|
|
||||||
Oyvind Kolas / pippin@gimp.org
|
|
||||||
|
|
||||||
Customised for Poky by
|
|
||||||
Matthew Allum / mallum@o-hand.com
|
|
||||||
|
|
||||||
Thanks to:
|
|
||||||
Liam R. E. Quin
|
|
||||||
William Skaggs
|
|
||||||
Jakub Steiner
|
|
||||||
|
|
||||||
Structure
|
|
||||||
---------
|
|
||||||
|
|
||||||
The stylesheet is divided into the following sections:
|
|
||||||
|
|
||||||
Positioning
|
|
||||||
Margins, paddings, width, font-size, clearing.
|
|
||||||
Decorations
|
|
||||||
Borders, style
|
|
||||||
Colors
|
|
||||||
Colors
|
|
||||||
Graphics
|
|
||||||
Graphical backgrounds
|
|
||||||
Nasty IE tweaks
|
|
||||||
Workarounds needed to make it work in internet explorer,
|
|
||||||
currently makes the stylesheet non validating, but up until
|
|
||||||
this point it is validating.
|
|
||||||
Mozilla extensions
|
|
||||||
Transparency for footer
|
|
||||||
Rounded corners on boxes
|
|
||||||
|
|
||||||
*/
|
|
||||||
|
|
||||||
|
|
||||||
/*************** /
|
|
||||||
/ Positioning /
|
|
||||||
/ ***************/
|
|
||||||
|
|
||||||
body {
|
|
||||||
font-family: Verdana, Sans, sans-serif;
|
|
||||||
|
|
||||||
min-width: 640px;
|
|
||||||
width: 80%;
|
|
||||||
margin: 0em auto;
|
|
||||||
padding: 2em 5em 5em 5em;
|
|
||||||
color: #333;
|
|
||||||
}
|
|
||||||
|
|
||||||
h1,h2,h3,h4,h5,h6,h7 {
|
|
||||||
font-family: Arial, Sans;
|
|
||||||
color: #00557D;
|
|
||||||
clear: both;
|
|
||||||
}
|
|
||||||
|
|
||||||
h1 {
|
|
||||||
font-size: 2em;
|
|
||||||
text-align: left;
|
|
||||||
padding: 0em 0em 0em 0em;
|
|
||||||
margin: 2em 0em 0em 0em;
|
|
||||||
}
|
|
||||||
|
|
||||||
h2.subtitle {
|
|
||||||
margin: 0.10em 0em 3.0em 0em;
|
|
||||||
padding: 0em 0em 0em 0em;
|
|
||||||
font-size: 1.8em;
|
|
||||||
padding-left: 20%;
|
|
||||||
font-weight: normal;
|
|
||||||
font-style: italic;
|
|
||||||
}
|
|
||||||
|
|
||||||
h2 {
|
|
||||||
margin: 2em 0em 0.66em 0em;
|
|
||||||
padding: 0.5em 0em 0em 0em;
|
|
||||||
font-size: 1.5em;
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
h3.subtitle {
|
|
||||||
margin: 0em 0em 1em 0em;
|
|
||||||
padding: 0em 0em 0em 0em;
|
|
||||||
font-size: 142.14%;
|
|
||||||
text-align: right;
|
|
||||||
}
|
|
||||||
|
|
||||||
h3 {
|
|
||||||
margin: 1em 0em 0.5em 0em;
|
|
||||||
padding: 1em 0em 0em 0em;
|
|
||||||
font-size: 140%;
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
h4 {
|
|
||||||
margin: 1em 0em 0.5em 0em;
|
|
||||||
padding: 1em 0em 0em 0em;
|
|
||||||
font-size: 120%;
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
h5 {
|
|
||||||
margin: 1em 0em 0.5em 0em;
|
|
||||||
padding: 1em 0em 0em 0em;
|
|
||||||
font-size: 110%;
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
h6 {
|
|
||||||
margin: 1em 0em 0em 0em;
|
|
||||||
padding: 1em 0em 0em 0em;
|
|
||||||
font-size: 110%;
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
.authorgroup {
|
|
||||||
background-color: transparent;
|
|
||||||
background-repeat: no-repeat;
|
|
||||||
padding-top: 256px;
|
|
||||||
background-image: url("figures/bitbake-title.png");
|
|
||||||
background-position: left top;
|
|
||||||
margin-top: -256px;
|
|
||||||
padding-right: 50px;
|
|
||||||
margin-left: 0px;
|
|
||||||
text-align: right;
|
|
||||||
width: 740px;
|
|
||||||
}
|
|
||||||
|
|
||||||
h3.author {
|
|
||||||
margin: 0em 0em 0em 0em;
|
|
||||||
padding: 0em 0em 0em 0em;
|
|
||||||
font-weight: normal;
|
|
||||||
font-size: 100%;
|
|
||||||
color: #333;
|
|
||||||
clear: both;
|
|
||||||
}
|
|
||||||
|
|
||||||
.author tt.email {
|
|
||||||
font-size: 66%;
|
|
||||||
}
|
|
||||||
|
|
||||||
.titlepage hr {
|
|
||||||
width: 0em;
|
|
||||||
clear: both;
|
|
||||||
}
|
|
||||||
|
|
||||||
.revhistory {
|
|
||||||
padding-top: 2em;
|
|
||||||
clear: both;
|
|
||||||
}
|
|
||||||
|
|
||||||
.toc,
|
|
||||||
.list-of-tables,
|
|
||||||
.list-of-examples,
|
|
||||||
.list-of-figures {
|
|
||||||
padding: 1.33em 0em 2.5em 0em;
|
|
||||||
color: #00557D;
|
|
||||||
}
|
|
||||||
|
|
||||||
.toc p,
|
|
||||||
.list-of-tables p,
|
|
||||||
.list-of-figures p,
|
|
||||||
.list-of-examples p {
|
|
||||||
padding: 0em 0em 0em 0em;
|
|
||||||
padding: 0em 0em 0.3em;
|
|
||||||
margin: 1.5em 0em 0em 0em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.toc p b,
|
|
||||||
.list-of-tables p b,
|
|
||||||
.list-of-figures p b,
|
|
||||||
.list-of-examples p b{
|
|
||||||
font-size: 100.0%;
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
.toc dl,
|
|
||||||
.list-of-tables dl,
|
|
||||||
.list-of-figures dl,
|
|
||||||
.list-of-examples dl {
|
|
||||||
margin: 0em 0em 0.5em 0em;
|
|
||||||
padding: 0em 0em 0em 0em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.toc dt {
|
|
||||||
margin: 0em 0em 0em 0em;
|
|
||||||
padding: 0em 0em 0em 0em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.toc dd {
|
|
||||||
margin: 0em 0em 0em 2.6em;
|
|
||||||
padding: 0em 0em 0em 0em;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.glossary dl,
|
|
||||||
div.variablelist dl {
|
|
||||||
}
|
|
||||||
|
|
||||||
.glossary dl dt,
|
|
||||||
.variablelist dl dt,
|
|
||||||
.variablelist dl dt span.term {
|
|
||||||
font-weight: normal;
|
|
||||||
width: 20em;
|
|
||||||
text-align: right;
|
|
||||||
}
|
|
||||||
|
|
||||||
.variablelist dl dt {
|
|
||||||
margin-top: 0.5em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.glossary dl dd,
|
|
||||||
.variablelist dl dd {
|
|
||||||
margin-top: -1em;
|
|
||||||
margin-left: 25.5em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.glossary dd p,
|
|
||||||
.variablelist dd p {
|
|
||||||
margin-top: 0em;
|
|
||||||
margin-bottom: 1em;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
div.calloutlist table td {
|
|
||||||
padding: 0em 0em 0em 0em;
|
|
||||||
margin: 0em 0em 0em 0em;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.calloutlist table td p {
|
|
||||||
margin-top: 0em;
|
|
||||||
margin-bottom: 1em;
|
|
||||||
}
|
|
||||||
|
|
||||||
div p.copyright {
|
|
||||||
text-align: left;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.legalnotice p.legalnotice-title {
|
|
||||||
margin-bottom: 0em;
|
|
||||||
}
|
|
||||||
|
|
||||||
p {
|
|
||||||
line-height: 1.5em;
|
|
||||||
margin-top: 0em;
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
dl {
|
|
||||||
padding-top: 0em;
|
|
||||||
}
|
|
||||||
|
|
||||||
hr {
|
|
||||||
border: solid 1px;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
.mediaobject,
|
|
||||||
.mediaobjectco {
|
|
||||||
text-align: center;
|
|
||||||
}
|
|
||||||
|
|
||||||
img {
|
|
||||||
border: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
ul {
|
|
||||||
padding: 0em 0em 0em 1.5em;
|
|
||||||
}
|
|
||||||
|
|
||||||
ul li {
|
|
||||||
padding: 0em 0em 0em 0em;
|
|
||||||
}
|
|
||||||
|
|
||||||
ul li p {
|
|
||||||
text-align: left;
|
|
||||||
}
|
|
||||||
|
|
||||||
table {
|
|
||||||
width :100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
th {
|
|
||||||
padding: 0.25em;
|
|
||||||
text-align: left;
|
|
||||||
font-weight: normal;
|
|
||||||
vertical-align: top;
|
|
||||||
}
|
|
||||||
|
|
||||||
td {
|
|
||||||
padding: 0.25em;
|
|
||||||
vertical-align: top;
|
|
||||||
}
|
|
||||||
|
|
||||||
p a[id] {
|
|
||||||
margin: 0px;
|
|
||||||
padding: 0px;
|
|
||||||
display: inline;
|
|
||||||
background-image: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
a {
|
|
||||||
text-decoration: underline;
|
|
||||||
color: #444;
|
|
||||||
}
|
|
||||||
|
|
||||||
pre {
|
|
||||||
overflow: auto;
|
|
||||||
}
|
|
||||||
|
|
||||||
a:hover {
|
|
||||||
text-decoration: underline;
|
|
||||||
/*font-weight: bold;*/
|
|
||||||
}
|
|
||||||
|
|
||||||
/* This style defines how the permalink character
|
|
||||||
appears by itself and when hovered over with
|
|
||||||
the mouse. */
|
|
||||||
|
|
||||||
[alt='Permalink'] { color: #eee; }
|
|
||||||
[alt='Permalink']:hover { color: black; }
|
|
||||||
|
|
||||||
|
|
||||||
div.informalfigure,
|
|
||||||
div.informalexample,
|
|
||||||
div.informaltable,
|
|
||||||
div.figure,
|
|
||||||
div.table,
|
|
||||||
div.example {
|
|
||||||
margin: 1em 0em;
|
|
||||||
padding: 1em;
|
|
||||||
page-break-inside: avoid;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
div.informalfigure p.title b,
|
|
||||||
div.informalexample p.title b,
|
|
||||||
div.informaltable p.title b,
|
|
||||||
div.figure p.title b,
|
|
||||||
div.example p.title b,
|
|
||||||
div.table p.title b{
|
|
||||||
padding-top: 0em;
|
|
||||||
margin-top: 0em;
|
|
||||||
font-size: 100%;
|
|
||||||
font-weight: normal;
|
|
||||||
}
|
|
||||||
|
|
||||||
.mediaobject .caption,
|
|
||||||
.mediaobject .caption p {
|
|
||||||
text-align: center;
|
|
||||||
font-size: 80%;
|
|
||||||
padding-top: 0.5em;
|
|
||||||
padding-bottom: 0.5em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.epigraph {
|
|
||||||
padding-left: 55%;
|
|
||||||
margin-bottom: 1em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.epigraph p {
|
|
||||||
text-align: left;
|
|
||||||
}
|
|
||||||
|
|
||||||
.epigraph .quote {
|
|
||||||
font-style: italic;
|
|
||||||
}
|
|
||||||
.epigraph .attribution {
|
|
||||||
font-style: normal;
|
|
||||||
text-align: right;
|
|
||||||
}
|
|
||||||
|
|
||||||
span.application {
|
|
||||||
font-style: italic;
|
|
||||||
}
|
|
||||||
|
|
||||||
.programlisting {
|
|
||||||
font-family: monospace;
|
|
||||||
font-size: 80%;
|
|
||||||
white-space: pre;
|
|
||||||
margin: 1.33em 0em;
|
|
||||||
padding: 1.33em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.tip,
|
|
||||||
.warning,
|
|
||||||
.caution,
|
|
||||||
.note {
|
|
||||||
margin-top: 1em;
|
|
||||||
margin-bottom: 1em;
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
/* force full width of table within div */
|
|
||||||
.tip table,
|
|
||||||
.warning table,
|
|
||||||
.caution table,
|
|
||||||
.note table {
|
|
||||||
border: none;
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
.tip table th,
|
|
||||||
.warning table th,
|
|
||||||
.caution table th,
|
|
||||||
.note table th {
|
|
||||||
padding: 0.8em 0.0em 0.0em 0.0em;
|
|
||||||
margin : 0em 0em 0em 0em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.tip p,
|
|
||||||
.warning p,
|
|
||||||
.caution p,
|
|
||||||
.note p {
|
|
||||||
margin-top: 0.5em;
|
|
||||||
margin-bottom: 0.5em;
|
|
||||||
padding-right: 1em;
|
|
||||||
text-align: left;
|
|
||||||
}
|
|
||||||
|
|
||||||
.acronym {
|
|
||||||
text-transform: uppercase;
|
|
||||||
}
|
|
||||||
|
|
||||||
b.keycap,
|
|
||||||
.keycap {
|
|
||||||
padding: 0.09em 0.3em;
|
|
||||||
margin: 0em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.itemizedlist li {
|
|
||||||
clear: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
.filename {
|
|
||||||
font-size: medium;
|
|
||||||
font-family: Courier, monospace;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
div.navheader, div.heading{
|
|
||||||
position: absolute;
|
|
||||||
left: 0em;
|
|
||||||
top: 0em;
|
|
||||||
width: 100%;
|
|
||||||
background-color: #cdf;
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.navfooter, div.footing{
|
|
||||||
position: fixed;
|
|
||||||
left: 0em;
|
|
||||||
bottom: 0em;
|
|
||||||
background-color: #eee;
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
div.navheader td,
|
|
||||||
div.navfooter td {
|
|
||||||
font-size: 66%;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.navheader table th {
|
|
||||||
/*font-family: Georgia, Times, serif;*/
|
|
||||||
/*font-size: x-large;*/
|
|
||||||
font-size: 80%;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.navheader table {
|
|
||||||
border-left: 0em;
|
|
||||||
border-right: 0em;
|
|
||||||
border-top: 0em;
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.navfooter table {
|
|
||||||
border-left: 0em;
|
|
||||||
border-right: 0em;
|
|
||||||
border-bottom: 0em;
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.navheader table td a,
|
|
||||||
div.navfooter table td a {
|
|
||||||
color: #777;
|
|
||||||
text-decoration: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* normal text in the footer */
|
|
||||||
div.navfooter table td {
|
|
||||||
color: black;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.navheader table td a:visited,
|
|
||||||
div.navfooter table td a:visited {
|
|
||||||
color: #444;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
/* links in header and footer */
|
|
||||||
div.navheader table td a:hover,
|
|
||||||
div.navfooter table td a:hover {
|
|
||||||
text-decoration: underline;
|
|
||||||
background-color: transparent;
|
|
||||||
color: #33a;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.navheader hr,
|
|
||||||
div.navfooter hr {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
.qandaset tr.question td p {
|
|
||||||
margin: 0em 0em 1em 0em;
|
|
||||||
padding: 0em 0em 0em 0em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.qandaset tr.answer td p {
|
|
||||||
margin: 0em 0em 1em 0em;
|
|
||||||
padding: 0em 0em 0em 0em;
|
|
||||||
}
|
|
||||||
.answer td {
|
|
||||||
padding-bottom: 1.5em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.emphasis {
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
/************* /
|
|
||||||
/ decorations /
|
|
||||||
/ *************/
|
|
||||||
|
|
||||||
.titlepage {
|
|
||||||
}
|
|
||||||
|
|
||||||
.part .title {
|
|
||||||
}
|
|
||||||
|
|
||||||
.subtitle {
|
|
||||||
border: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
|
||||||
h1 {
|
|
||||||
border: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
h2 {
|
|
||||||
border-top: solid 0.2em;
|
|
||||||
border-bottom: solid 0.06em;
|
|
||||||
}
|
|
||||||
|
|
||||||
h3 {
|
|
||||||
border-top: 0em;
|
|
||||||
border-bottom: solid 0.06em;
|
|
||||||
}
|
|
||||||
|
|
||||||
h4 {
|
|
||||||
border: 0em;
|
|
||||||
border-bottom: solid 0.06em;
|
|
||||||
}
|
|
||||||
|
|
||||||
h5 {
|
|
||||||
border: 0em;
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
.programlisting {
|
|
||||||
border: solid 1px;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.figure,
|
|
||||||
div.table,
|
|
||||||
div.informalfigure,
|
|
||||||
div.informaltable,
|
|
||||||
div.informalexample,
|
|
||||||
div.example {
|
|
||||||
border: 1px solid;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
.tip,
|
|
||||||
.warning,
|
|
||||||
.caution,
|
|
||||||
.note {
|
|
||||||
border: 1px solid;
|
|
||||||
}
|
|
||||||
|
|
||||||
.tip table th,
|
|
||||||
.warning table th,
|
|
||||||
.caution table th,
|
|
||||||
.note table th {
|
|
||||||
border-bottom: 1px solid;
|
|
||||||
}
|
|
||||||
|
|
||||||
.question td {
|
|
||||||
border-top: 1px solid black;
|
|
||||||
}
|
|
||||||
|
|
||||||
.answer {
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
b.keycap,
|
|
||||||
.keycap {
|
|
||||||
border: 1px solid;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
div.navheader, div.heading{
|
|
||||||
border-bottom: 1px solid;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
div.navfooter, div.footing{
|
|
||||||
border-top: 1px solid;
|
|
||||||
}
|
|
||||||
|
|
||||||
/********* /
|
|
||||||
/ colors /
|
|
||||||
/ *********/
|
|
||||||
|
|
||||||
body {
|
|
||||||
color: #333;
|
|
||||||
background: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
a {
|
|
||||||
background: transparent;
|
|
||||||
}
|
|
||||||
|
|
||||||
a:hover {
|
|
||||||
background-color: #dedede;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
h1,
|
|
||||||
h2,
|
|
||||||
h3,
|
|
||||||
h4,
|
|
||||||
h5,
|
|
||||||
h6,
|
|
||||||
h7,
|
|
||||||
h8 {
|
|
||||||
background-color: transparent;
|
|
||||||
}
|
|
||||||
|
|
||||||
hr {
|
|
||||||
border-color: #aaa;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
.tip, .warning, .caution, .note {
|
|
||||||
border-color: #fff;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
.tip table th,
|
|
||||||
.warning table th,
|
|
||||||
.caution table th,
|
|
||||||
.note table th {
|
|
||||||
border-bottom-color: #fff;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
.warning {
|
|
||||||
background-color: #f0f0f2;
|
|
||||||
}
|
|
||||||
|
|
||||||
.caution {
|
|
||||||
background-color: #f0f0f2;
|
|
||||||
}
|
|
||||||
|
|
||||||
.tip {
|
|
||||||
background-color: #f0f0f2;
|
|
||||||
}
|
|
||||||
|
|
||||||
.note {
|
|
||||||
background-color: #f0f0f2;
|
|
||||||
}
|
|
||||||
|
|
||||||
.glossary dl dt,
|
|
||||||
.variablelist dl dt,
|
|
||||||
.variablelist dl dt span.term {
|
|
||||||
color: #044;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.figure,
|
|
||||||
div.table,
|
|
||||||
div.example,
|
|
||||||
div.informalfigure,
|
|
||||||
div.informaltable,
|
|
||||||
div.informalexample {
|
|
||||||
border-color: #aaa;
|
|
||||||
}
|
|
||||||
|
|
||||||
pre.programlisting {
|
|
||||||
color: black;
|
|
||||||
background-color: #fff;
|
|
||||||
border-color: #aaa;
|
|
||||||
border-width: 2px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.guimenu,
|
|
||||||
.guilabel,
|
|
||||||
.guimenuitem {
|
|
||||||
background-color: #eee;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
b.keycap,
|
|
||||||
.keycap {
|
|
||||||
background-color: #eee;
|
|
||||||
border-color: #999;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
div.navheader {
|
|
||||||
border-color: black;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
div.navfooter {
|
|
||||||
border-color: black;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
/*********** /
|
|
||||||
/ graphics /
|
|
||||||
/ ***********/
|
|
||||||
|
|
||||||
/*
|
|
||||||
body {
|
|
||||||
background-image: url("images/body_bg.jpg");
|
|
||||||
background-attachment: fixed;
|
|
||||||
}
|
|
||||||
|
|
||||||
.navheader,
|
|
||||||
.note,
|
|
||||||
.tip {
|
|
||||||
background-image: url("images/note_bg.jpg");
|
|
||||||
background-attachment: fixed;
|
|
||||||
}
|
|
||||||
|
|
||||||
.warning,
|
|
||||||
.caution {
|
|
||||||
background-image: url("images/warning_bg.jpg");
|
|
||||||
background-attachment: fixed;
|
|
||||||
}
|
|
||||||
|
|
||||||
.figure,
|
|
||||||
.informalfigure,
|
|
||||||
.example,
|
|
||||||
.informalexample,
|
|
||||||
.table,
|
|
||||||
.informaltable {
|
|
||||||
background-image: url("images/figure_bg.jpg");
|
|
||||||
background-attachment: fixed;
|
|
||||||
}
|
|
||||||
|
|
||||||
*/
|
|
||||||
h1,
|
|
||||||
h2,
|
|
||||||
h3,
|
|
||||||
h4,
|
|
||||||
h5,
|
|
||||||
h6,
|
|
||||||
h7{
|
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
|
||||||
Example of how to stick an image as part of the title.
|
|
||||||
|
|
||||||
div.article .titlepage .title
|
|
||||||
{
|
|
||||||
background-image: url("figures/white-on-black.png");
|
|
||||||
background-position: center;
|
|
||||||
background-repeat: repeat-x;
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
div.preface .titlepage .title,
|
|
||||||
div.colophon .title,
|
|
||||||
div.chapter .titlepage .title,
|
|
||||||
div.article .titlepage .title
|
|
||||||
{
|
|
||||||
}
|
|
||||||
|
|
||||||
div.section div.section .titlepage .title,
|
|
||||||
div.sect2 .titlepage .title {
|
|
||||||
background: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
h1.title {
|
|
||||||
background-color: transparent;
|
|
||||||
background-repeat: no-repeat;
|
|
||||||
height: 256px;
|
|
||||||
text-indent: -9000px;
|
|
||||||
overflow:hidden;
|
|
||||||
}
|
|
||||||
|
|
||||||
h2.subtitle {
|
|
||||||
background-color: transparent;
|
|
||||||
text-indent: -9000px;
|
|
||||||
overflow:hidden;
|
|
||||||
width: 0px;
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
/*************************************** /
|
|
||||||
/ pippin.gimp.org specific alterations /
|
|
||||||
/ ***************************************/
|
|
||||||
|
|
||||||
/*
|
|
||||||
div.heading, div.navheader {
|
|
||||||
color: #777;
|
|
||||||
font-size: 80%;
|
|
||||||
padding: 0;
|
|
||||||
margin: 0;
|
|
||||||
text-align: left;
|
|
||||||
position: absolute;
|
|
||||||
top: 0px;
|
|
||||||
left: 0px;
|
|
||||||
width: 100%;
|
|
||||||
height: 50px;
|
|
||||||
background: url('/gfx/heading_bg.png') transparent;
|
|
||||||
background-repeat: repeat-x;
|
|
||||||
background-attachment: fixed;
|
|
||||||
border: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.heading a {
|
|
||||||
color: #444;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.footing, div.navfooter {
|
|
||||||
border: none;
|
|
||||||
color: #ddd;
|
|
||||||
font-size: 80%;
|
|
||||||
text-align:right;
|
|
||||||
|
|
||||||
width: 100%;
|
|
||||||
padding-top: 10px;
|
|
||||||
position: absolute;
|
|
||||||
bottom: 0px;
|
|
||||||
left: 0px;
|
|
||||||
|
|
||||||
background: url('/gfx/footing_bg.png') transparent;
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
/****************** /
|
|
||||||
/ nasty ie tweaks /
|
|
||||||
/ ******************/
|
|
||||||
|
|
||||||
/*
|
|
||||||
div.heading, div.navheader {
|
|
||||||
width:expression(document.body.clientWidth + "px");
|
|
||||||
}
|
|
||||||
|
|
||||||
div.footing, div.navfooter {
|
|
||||||
width:expression(document.body.clientWidth + "px");
|
|
||||||
margin-left:expression("-5em");
|
|
||||||
}
|
|
||||||
body {
|
|
||||||
padding:expression("4em 5em 0em 5em");
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**************************************** /
|
|
||||||
/ mozilla vendor specific css extensions /
|
|
||||||
/ ****************************************/
|
|
||||||
/*
|
|
||||||
div.navfooter, div.footing{
|
|
||||||
-moz-opacity: 0.8em;
|
|
||||||
}
|
|
||||||
|
|
||||||
div.figure,
|
|
||||||
div.table,
|
|
||||||
div.informalfigure,
|
|
||||||
div.informaltable,
|
|
||||||
div.informalexample,
|
|
||||||
div.example,
|
|
||||||
.tip,
|
|
||||||
.warning,
|
|
||||||
.caution,
|
|
||||||
.note {
|
|
||||||
-moz-border-radius: 0.5em;
|
|
||||||
}
|
|
||||||
|
|
||||||
b.keycap,
|
|
||||||
.keycap {
|
|
||||||
-moz-border-radius: 0.3em;
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
table tr td table tr td {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
hr {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
table {
|
|
||||||
border: 0em;
|
|
||||||
}
|
|
||||||
|
|
||||||
.photo {
|
|
||||||
float: right;
|
|
||||||
margin-left: 1.5em;
|
|
||||||
margin-bottom: 1.5em;
|
|
||||||
margin-top: 0em;
|
|
||||||
max-width: 17em;
|
|
||||||
border: 1px solid gray;
|
|
||||||
padding: 3px;
|
|
||||||
background: white;
|
|
||||||
}
|
|
||||||
.seperator {
|
|
||||||
padding-top: 2em;
|
|
||||||
clear: both;
|
|
||||||
}
|
|
||||||
|
|
||||||
#validators {
|
|
||||||
margin-top: 5em;
|
|
||||||
text-align: right;
|
|
||||||
color: #777;
|
|
||||||
}
|
|
||||||
@media print {
|
|
||||||
body {
|
|
||||||
font-size: 8pt;
|
|
||||||
}
|
|
||||||
.noprint {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
.tip,
|
|
||||||
.note {
|
|
||||||
background: #f0f0f2;
|
|
||||||
color: #333;
|
|
||||||
padding: 20px;
|
|
||||||
margin: 20px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.tip h3,
|
|
||||||
.note h3 {
|
|
||||||
padding: 0em;
|
|
||||||
margin: 0em;
|
|
||||||
font-size: 2em;
|
|
||||||
font-weight: bold;
|
|
||||||
color: #333;
|
|
||||||
}
|
|
||||||
|
|
||||||
.tip a,
|
|
||||||
.note a {
|
|
||||||
color: #333;
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
|
|
||||||
.footnote {
|
|
||||||
font-size: small;
|
|
||||||
color: #333;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Changes the announcement text */
|
|
||||||
.tip h3,
|
|
||||||
.warning h3,
|
|
||||||
.caution h3,
|
|
||||||
.note h3 {
|
|
||||||
font-size:large;
|
|
||||||
color: #00557D;
|
|
||||||
}
|
|
|
@ -1,88 +0,0 @@
|
||||||
<!DOCTYPE book PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
|
|
||||||
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
|
|
||||||
|
|
||||||
<book id='bitbake-user-manual' lang='en'
|
|
||||||
xmlns:xi="http://www.w3.org/2003/XInclude"
|
|
||||||
xmlns="http://docbook.org/ns/docbook"
|
|
||||||
>
|
|
||||||
<bookinfo>
|
|
||||||
|
|
||||||
<mediaobject>
|
|
||||||
<imageobject>
|
|
||||||
<imagedata fileref='figures/bitbake-title.png'
|
|
||||||
format='SVG'
|
|
||||||
align='left' scalefit='1' width='100%'/>
|
|
||||||
</imageobject>
|
|
||||||
</mediaobject>
|
|
||||||
|
|
||||||
<title>
|
|
||||||
BitBake User Manual
|
|
||||||
</title>
|
|
||||||
|
|
||||||
<authorgroup>
|
|
||||||
<author>
|
|
||||||
<firstname>Richard Purdie, Chris Larson, and </firstname> <surname>Phil Blundell</surname>
|
|
||||||
<affiliation>
|
|
||||||
<orgname>BitBake Community</orgname>
|
|
||||||
</affiliation>
|
|
||||||
<email>bitbake-devel@lists.openembedded.org</email>
|
|
||||||
</author>
|
|
||||||
</authorgroup>
|
|
||||||
|
|
||||||
<!--
|
|
||||||
# Add in some revision history if we want it here.
|
|
||||||
<revhistory>
|
|
||||||
<revision>
|
|
||||||
<revnumber>x.x</revnumber>
|
|
||||||
<date>dd month year</date>
|
|
||||||
<revremark>Some relevent comment</revremark>
|
|
||||||
</revision>
|
|
||||||
<revision>
|
|
||||||
<revnumber>x.x</revnumber>
|
|
||||||
<date>dd month year</date>
|
|
||||||
<revremark>Some relevent comment</revremark>
|
|
||||||
</revision>
|
|
||||||
<revision>
|
|
||||||
<revnumber>x.x</revnumber>
|
|
||||||
<date>dd month year</date>
|
|
||||||
<revremark>Some relevent comment</revremark>
|
|
||||||
</revision>
|
|
||||||
<revision>
|
|
||||||
<revnumber>x.x</revnumber>
|
|
||||||
<date>dd month year</date>
|
|
||||||
<revremark>Some relevent comment</revremark>
|
|
||||||
</revision>
|
|
||||||
</revhistory>
|
|
||||||
-->
|
|
||||||
|
|
||||||
<copyright>
|
|
||||||
<year>2004-2016</year>
|
|
||||||
<holder>Richard Purdie</holder>
|
|
||||||
<holder>Chris Larson</holder>
|
|
||||||
<holder>and Phil Blundell</holder>
|
|
||||||
</copyright>
|
|
||||||
|
|
||||||
<legalnotice>
|
|
||||||
<para>
|
|
||||||
This work is licensed under the Creative Commons Attribution License.
|
|
||||||
To view a copy of this license, visit
|
|
||||||
<ulink url="http://creativecommons.org/licenses/by/2.5/">http://creativecommons.org/licenses/by/2.5/</ulink>
|
|
||||||
or send a letter to Creative Commons, 444 Castro Street,
|
|
||||||
Suite 900, Mountain View, California 94041, USA.
|
|
||||||
</para>
|
|
||||||
</legalnotice>
|
|
||||||
</bookinfo>
|
|
||||||
|
|
||||||
<xi:include href="bitbake-user-manual-intro.xml"/>
|
|
||||||
|
|
||||||
<xi:include href="bitbake-user-manual-execution.xml"/>
|
|
||||||
|
|
||||||
<xi:include href="bitbake-user-manual-metadata.xml"/>
|
|
||||||
|
|
||||||
<xi:include href="bitbake-user-manual-fetching.xml"/>
|
|
||||||
|
|
||||||
<xi:include href="bitbake-user-manual-ref-variables.xml"/>
|
|
||||||
|
|
||||||
<xi:include href="bitbake-user-manual-hello.xml"/>
|
|
||||||
|
|
||||||
</book>
|
|
Binary file not shown.
@ -89,7 +89,7 @@ quit after parsing the BB files (developers only)
show current and preferred versions of all packages
.TP
.B \-e, \-\-environment
show the global or per-recipe environment (this is what used to be bbread)
show the global or per-package environment (this is what used to be bbread)
.TP
.B \-g, \-\-graphviz
emit the dependency trees of the specified packages in the dot syntax

@ -104,30 +104,6 @@ Show debug logging for the specified logging domains
.B \-P, \-\-profile
profile the command and print a report
.TP
.B \-uUI, \-\-ui=UI
User interface to use. Currently, knotty, taskexp or ncurses can be specified as UI.
.TP
.B \-tSERVERTYPE, \-\-servertype=SERVERTYPE
Choose which server to use: none, process or xmlrpc.
.TP
.B \-\-revisions-changed
Set the exit code depending on whether upstream floating revisions have changed or not.
.TP
.B \-\-server-only
Run bitbake without a UI; the frontend can connect to the bitbake server itself.
.TP
.B \-BBIND, \-\-bind=BIND
The name/address for the bitbake server to bind to.
.TP
.B \-\-no\-setscene
Do not run any setscene tasks; forces builds.

.SH ENVIRONMENT VARIABLES
bitbake uses the following environment variables to control its
operation:
.TP
.B BITBAKE_UI
The bitbake user interface; overridden by the \fB-u\fP commandline option.

.SH AUTHORS
BitBake was written by
@ -0,0 +1,56 @@
topdir = .
manual = $(topdir)/usermanual.xml
# types = pdf txt rtf ps xhtml html man tex texi dvi
# types = pdf txt
types = $(xmltotypes) $(htmltypes)
xmltotypes = pdf txt
htmltypes = html xhtml
htmlxsl = $(if $(filter $@,$(foreach type,$(htmltypes),$(type)-nochunks)),http://docbook.sourceforge.net/release/xsl/current/xhtml/docbook.xsl,http://docbook.sourceforge.net/release/xsl/current/$@/chunk.xsl)
htmlcssfile = docbook.css
htmlcss = $(topdir)/html.css
# htmlcssfile =
# htmlcss =
cleanfiles = $(foreach i,$(types),$(topdir)/$(i))

ifdef DEBUG
define command
	$(1)
endef
else
define command
	@echo $(2) $(3) $(4)
	@$(1) >/dev/null
endef
endif

all: $(types)

lint: $(manual) FORCE
	$(call command,xmllint --xinclude --postvalid --noout $(manual),XMLLINT $(manual))

$(types) $(foreach type,$(htmltypes),$(type)-nochunks): lint FORCE

$(foreach type,$(htmltypes),$(type)-nochunks): $(if $(htmlcss),$(htmlcss)) $(manual)
	@mkdir -p $@
ifdef htmlcss
	$(call command,install -m 0644 $(htmlcss) $@/$(htmlcssfile),CP $(htmlcss) $@/$(htmlcssfile))
endif
	$(call command,xsltproc --stringparam base.dir $@/ $(if $(htmlcssfile),--stringparam html.stylesheet $(htmlcssfile)) $(htmlxsl) $(manual) > $@/index.$(patsubst %-nochunks,%,$@),XSLTPROC $@ $(manual))

$(htmltypes): $(if $(htmlcss),$(htmlcss)) $(manual)
	@mkdir -p $@
ifdef htmlcss
	$(call command,install -m 0644 $(htmlcss) $@/$(htmlcssfile),CP $(htmlcss) $@/$(htmlcssfile))
endif
	$(call command,xsltproc --stringparam base.dir $@/ $(if $(htmlcssfile),--stringparam html.stylesheet $(htmlcssfile)) $(htmlxsl) $(manual),XSLTPROC $@ $(manual))

$(xmltotypes): $(manual)
	$(call command,xmlto --with-dblatex --extensions -o $(topdir)/$@ $@ $(manual),XMLTO $@ $(manual))

clean:
	rm -rf $(cleanfiles)

$(foreach i,$(types) $(foreach type,$(htmltypes),$(type)-nochunks),clean-$(i)):
	rm -rf $(patsubst clean-%,%,$@)

FORCE:
@ -0,0 +1,611 @@
<?xml version="1.0"?>
<!--
ex:ts=4:sw=4:sts=4:et
-*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-->
<!DOCTYPE book PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
"http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
<book>
<bookinfo>
<title>BitBake User Manual</title>
<authorgroup>
<corpauthor>BitBake Team</corpauthor>
</authorgroup>
<copyright>
<year>2004, 2005, 2006, 2011</year>
<holder>Chris Larson</holder>
<holder>Phil Blundell</holder>
<holder>Richard Purdie</holder>
</copyright>
<legalnotice>
<para>This work is licensed under the Creative Commons Attribution License. To view a copy of this license, visit <ulink url="http://creativecommons.org/licenses/by/2.5/">http://creativecommons.org/licenses/by/2.5/</ulink> or send a letter to Creative Commons, 559 Nathan Abbott Way, Stanford, California 94305, USA.</para>
</legalnotice>
</bookinfo>
<chapter>
<title>Introduction</title>
<section>
<title>Overview</title>
<para>BitBake is, at its simplest, a tool for executing
tasks and managing metadata. As such, its similarities to GNU make and other
build tools are readily apparent. It was inspired by Portage, the package management system used by the Gentoo Linux distribution. BitBake is the basis of the <ulink url="http://www.openembedded.org/">OpenEmbedded</ulink> project, which is used to build and maintain a number of embedded Linux distributions and projects such as Angstrom and the Yocto Project.</para>
</section>
<section>
<title>Background and goals</title>
<para>Prior to BitBake, no other build tool adequately met
the needs of an aspiring embedded Linux distribution. All of the
build systems used by traditional desktop Linux distributions lacked
important functionality, and none of the ad-hoc
<emphasis>buildroot</emphasis> systems, prevalent in the
embedded space, were scalable or maintainable.</para>

<para>Some important original goals for BitBake were:
<itemizedlist>
<listitem><para>Handle cross-compilation.</para></listitem>
<listitem><para>Handle inter-package dependencies (build time on target architecture, build time on native architecture, and runtime).</para></listitem>
<listitem><para>Support running any number of tasks within a given package, including, but not limited to, fetching upstream sources, unpacking them, patching them, configuring them, et cetera.</para></listitem>
<listitem><para>Must be Linux distribution agnostic (both build and target).</para></listitem>
<listitem><para>Must be architecture agnostic.</para></listitem>
<listitem><para>Must support multiple build and target operating systems (including Cygwin, the BSDs, etc).</para></listitem>
<listitem><para>Must be able to be self-contained, rather than tightly integrated into the build machine's root filesystem.</para></listitem>
<listitem><para>There must be a way to handle conditional metadata (on target architecture, operating system, distribution, machine).</para></listitem>
<listitem><para>It must be easy for the person using the tools to supply their own local metadata and packages to operate against.</para></listitem>
<listitem><para>Must make it easy to collaborate
between multiple projects using BitBake for their
builds.</para></listitem>
<listitem><para>Should provide an inheritance mechanism to
share common metadata between many packages.</para></listitem>
</itemizedlist>
</para>
<para>Over time it has become apparent that some further requirements were necessary:
<itemizedlist>
<listitem><para>Handle variants of a base recipe (native, sdk, multilib).</para></listitem>
<listitem><para>Be able to split metadata into layers and allow layers to override each other.</para></listitem>
<listitem><para>Allow representation of a given set of input variables to a task as a checksum.</para></listitem>
<listitem><para>Based on that checksum, allow acceleration of builds with prebuilt components.</para></listitem>
</itemizedlist>
</para>

<para>BitBake satisfies all the original requirements and many more, with extensions having been made to the basic functionality to reflect the additional requirements. Flexibility and power have always been the priorities. It is highly extensible, supporting embedded Python code and execution of any arbitrary tasks.</para>
</section>
</chapter>
<chapter>
<title>Metadata</title>
<section>
<title>Description</title>
<para>BitBake metadata can be classified into 3 major areas:</para>
<itemizedlist>
<listitem>
<para>Configuration Files</para>
</listitem>
<listitem>
<para>.bb Files</para>
</listitem>
<listitem>
<para>Classes</para>
</listitem>
</itemizedlist>
<para>What follows are a large number of examples of BitBake metadata. Any syntax which isn't supported in any of the aforementioned areas will be documented as such.</para>
<section>
<title>Basic variable setting</title>
<para><screen><varname>VARIABLE</varname> = "value"</screen></para>
<para>In this example, <varname>VARIABLE</varname> is <literal>value</literal>.</para>
</section>
<section>
<title>Variable expansion</title>
<para>BitBake supports variables referencing one another's contents using a syntax which is similar to shell scripting.</para>
<para><screen><varname>A</varname> = "aval"
<varname>B</varname> = "pre${A}post"</screen></para>
<para>This results in <varname>A</varname> containing <literal>aval</literal> and <varname>B</varname> containing <literal>preavalpost</literal>.</para>
</section>
<section>
<title>Setting a default value (?=)</title>
<para><screen><varname>A</varname> ?= "aval"</screen></para>
<para>If <varname>A</varname> is set before the above is called, it will retain its previous value. If <varname>A</varname> is unset prior to the above call, <varname>A</varname> will be set to <literal>aval</literal>. Note that this assignment is immediate, so if there are multiple ?= assignments to a single variable, the first of those will be used.</para>
</section>
<section>
<title>Setting a weak default value (??=)</title>
<para><screen><varname>A</varname> ??= "somevalue"
<varname>A</varname> ??= "someothervalue"</screen></para>
<para>If <varname>A</varname> is set before the above, it will retain that value. If <varname>A</varname> is unset prior to the above, <varname>A</varname> will be set to <literal>someothervalue</literal>. This is a lazy/weak assignment in that the assignment does not occur until the end of the parsing process, so that the last, rather than the first, ??= assignment to a given variable will be used. Any other setting of A using = or ?= will, however, override the value set with ??=.</para>
</section>
<section>
<title>Immediate variable expansion (:=)</title>
<para>:= results in a variable's contents being expanded immediately, rather than when the variable is actually used.</para>
<para><screen><varname>T</varname> = "123"
<varname>A</varname> := "${B} ${A} test ${T}"
<varname>T</varname> = "456"
<varname>B</varname> = "${T} bval"

<varname>C</varname> = "cval"
<varname>C</varname> := "${C}append"</screen></para>
<para>In that example, <varname>A</varname> would contain <literal> test 123</literal>, <varname>B</varname> would contain <literal>456 bval</literal>, and <varname>C</varname> would be <literal>cvalappend</literal>.</para>
</section>
<section>
<title>Appending (+=) and prepending (=+)</title>
<para><screen><varname>B</varname> = "bval"
<varname>B</varname> += "additionaldata"
<varname>C</varname> = "cval"
<varname>C</varname> =+ "test"</screen></para>
<para>In this example, <varname>B</varname> is now <literal>bval additionaldata</literal> and <varname>C</varname> is <literal>test cval</literal>.</para>
</section>
<section>
<title>Appending (.=) and prepending (=.) without spaces</title>
<para><screen><varname>B</varname> = "bval"
<varname>B</varname> .= "additionaldata"
<varname>C</varname> = "cval"
<varname>C</varname> =. "test"</screen></para>
<para>In this example, <varname>B</varname> is now <literal>bvaladditionaldata</literal> and <varname>C</varname> is <literal>testcval</literal>. In contrast to the above appending and prepending operators, no additional space
will be introduced.</para>
</section>
<section>
<title>Conditional metadata set</title>
<para>OVERRIDES is a <quote>:</quote> separated variable containing each item for which you want to satisfy conditions. So, if you have a variable which is conditional on <quote>arm</quote>, and <quote>arm</quote> is in OVERRIDES, then the <quote>arm</quote>-specific version of the variable is used rather than the non-conditional version. Example:</para>
<para><screen><varname>OVERRIDES</varname> = "architecture:os:machine"
<varname>TEST</varname> = "defaultvalue"
<varname>TEST_os</varname> = "osspecificvalue"
<varname>TEST_condnotinoverrides</varname> = "othercondvalue"</screen></para>
<para>In this example, <varname>TEST</varname> would be <literal>osspecificvalue</literal>, due to the condition <quote>os</quote> being in <varname>OVERRIDES</varname>.</para>
</section>
<section>
<title>Conditional appending</title>
<para>BitBake also supports appending and prepending to variables based on whether something is in OVERRIDES. Example:</para>
<para><screen><varname>DEPENDS</varname> = "glibc ncurses"
<varname>OVERRIDES</varname> = "machine:local"
<varname>DEPENDS_append_machine</varname> = " libmad"</screen></para>
<para>In this example, <varname>DEPENDS</varname> is set to <literal>glibc ncurses libmad</literal>.</para>
</section>
<section>
<title>Inclusion</title>
<para>Next, there is the <literal>include</literal> directive, which causes BitBake to parse whatever file you specify, and insert it at that location, which is not unlike <command>make</command>. However, if the path specified on the <literal>include</literal> line is a relative path, BitBake will locate the first one it can find within <envar>BBPATH</envar>.</para>
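<para>As a brief illustrative sketch (the file name below is a made-up placeholder, not a file shipped with BitBake), a recipe or configuration file could pull shared definitions in with:</para>
<para><screen>include conf/local-settings.inc</screen></para>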
</section>
<section>
<title>Requiring inclusion</title>
<para>In contrast to the <literal>include</literal> directive, <literal>require</literal> will
raise a ParseError if the file to be included cannot be found. Otherwise it will behave just like the <literal>
include</literal> directive.</para>
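<para>For example (again with a placeholder file name), the following aborts the parse if the file cannot be found anywhere in <envar>BBPATH</envar>:</para>
<para><screen>require conf/required-settings.inc</screen></para>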
</section>
<section>
<title>Python variable expansion</title>
<para><screen><varname>DATE</varname> = "${@time.strftime('%Y%m%d',time.gmtime())}"</screen></para>
<para>This would result in the <varname>DATE</varname> variable containing today's date.</para>
</section>
<section>
<title>Defining executable metadata</title>
<para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
<para><screen>do_mytask () {
    echo "Hello, world!"
}</screen></para>
<para>This is essentially identical to setting a variable, except that this variable happens to be executable shell code.</para>
<para><screen>python do_printdate () {
    import time
    print time.strftime('%Y%m%d', time.gmtime())
}</screen></para>
<para>This is similar to the previous example, but it flags the function as Python so that BitBake knows it is Python code.</para>
</section>
<section>
<title>Defining Python functions into the global Python namespace</title>
<para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
<para><screen>def get_depends(bb, d):
    if bb.data.getVar('SOMECONDITION', d, True):
        return "dependencywithcond"
    else:
        return "dependency"

<varname>SOMECONDITION</varname> = "1"
<varname>DEPENDS</varname> = "${@get_depends(bb, d)}"</screen></para>
<para>This would result in <varname>DEPENDS</varname> containing <literal>dependencywithcond</literal>.</para>
</section>
<section>
<title>Variable flags</title>
<para>Variables can have associated flags which provide a way of tagging extra information onto a variable. Several flags are used internally by BitBake, but they can be used externally too if needed. The standard operations mentioned above also work on flags.</para>
<para><screen><varname>VARIABLE</varname>[<varname>SOMEFLAG</varname>] = "value"</screen></para>
<para>In this example, <varname>VARIABLE</varname> has a flag, <varname>SOMEFLAG</varname>, which is set to <literal>value</literal>.</para>
</section>
<section>
<title>Inheritance</title>
<para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
<para>The <literal>inherit</literal> directive is a means of specifying what classes of functionality your .bb requires. It is a rudimentary form of inheritance. For example, you can easily abstract out the tasks involved in building a package that uses autoconf and automake, and put that into a bbclass for your packages to make use of. A given bbclass is located by searching for classes/filename.oeclass in <envar>BBPATH</envar>, where filename is what you inherited.</para>
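<para>For instance, a recipe built with the usual autotools workflow could state the following, assuming a matching class file is available in a classes/ directory somewhere on <envar>BBPATH</envar>:</para>
<para><screen>inherit autotools</screen></para>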
</section>
<section>
<title>Tasks</title>
<para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
<para>In BitBake, each step that needs to be run for a given .bb is known as a task. There is a command <literal>addtask</literal> to add new tasks (the task must be defined as executable metadata and its name must start with <quote>do_</quote>) and to describe intertask dependencies.</para>
<para><screen>python do_printdate () {
    import time
    print time.strftime('%Y%m%d', time.gmtime())
}

addtask printdate before do_build</screen></para>
<para>This defines the necessary Python function and adds it as a task which is now a dependency of do_build, the default task. If anyone executes the do_build task, that will result in do_printdate being run first.</para>
</section>

<section>
<title>Task Flags</title>
<para>Tasks support a number of flags which control various functionality of the task. These are as follows:</para>
<para>'dirs' - directories which should be created before the task runs</para>
<para>'cleandirs' - directories which should be created before the task runs but should be empty</para>
<para>'noexec' - marks the task as being empty, with no execution required. These are used as dependency placeholders, or used when added tasks need to be subsequently disabled.</para>
<para>'nostamp' - don't generate a stamp file for a task. This means the task is always re-executed.</para>
<para>'fakeroot' - this task needs to be run in a fakeroot environment, obtained by adding the variables in FAKEROOTENV to the environment.</para>
<para>'umask' - the umask to run the task under.</para>
<para>For the 'deptask', 'rdeptask', 'recdeptask' and 'recrdeptask' flags please see the dependencies section.</para>
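<para>As an illustration only (the task and directory names below are hypothetical placeholders, not required usage), a recipe might combine several of these flags like so:</para>
<para><screen>do_compile[dirs] = "${B}"
do_install[cleandirs] = "${D}"
do_install[fakeroot] = "1"
do_install[umask] = "022"
do_listtasks[nostamp] = "1"</screen></para>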
</section>

<section>
<title>Events</title>
<para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
<para>BitBake allows installation of event handlers. Events are triggered at certain points during operation, such as the beginning of operation against a given .bb, the start of a given task, task failure, task success, et cetera. The intent is to make it easy to do things like email notification on build failure.</para>
<para><screen>addhandler myclass_eventhandler
python myclass_eventhandler() {
    from bb.event import getName
    from bb import data

    print("The name of the Event is %s" % getName(e))
    print("The file we run for is %s" % data.getVar('FILE', e.data, True))
}
</screen></para><para>
This event handler gets called every time an event is triggered. A global variable <varname>e</varname> is defined. <varname>e</varname>.data contains an instance of bb.data. With the getName(<varname>e</varname>)
method one can get the name of the triggered event.</para><para>The above event handler prints the name
of the event and the content of the <varname>FILE</varname> variable.</para>
</section>
<section>
<title>Variants</title>
<para>Two BitBake features exist to facilitate the creation of multiple buildable incarnations from a single recipe file.</para>
<para>The first is <varname>BBCLASSEXTEND</varname>. This variable is a space separated list of classes used to "extend" the recipe for each variant. As an example, setting <screen>BBCLASSEXTEND = "native"</screen> results in a second incarnation of the current recipe being available. This second incarnation will have the "native" class inherited.</para>
<para>The second feature is <varname>BBVERSIONS</varname>. This variable allows a single recipe to build multiple versions of a project from a single recipe file, and allows you to specify conditional metadata (using the <varname>OVERRIDES</varname> mechanism) for a single version, or an optionally named range of versions:</para>
<para><screen>BBVERSIONS = "1.0 2.0 git"
SRC_URI_git = "git://someurl/somepath.git"</screen></para>
<para><screen>BBVERSIONS = "1.0.[0-6]:1.0.0+ \
1.0.[7-9]:1.0.7+"
SRC_URI_append_1.0.7+ = "file://some_patch_which_the_new_versions_need.patch;patch=1"</screen></para>
<para>Note that the name of the range will default to the original version of the recipe, so given OE, a recipe file of foo_1.0.0+.bb will default the name of its versions to 1.0.0+. This is useful, as the range name is not only placed into overrides; it's also made available for the metadata to use in the form of the <varname>BPV</varname> variable, for use in file:// search paths (<varname>FILESPATH</varname>).</para>
</section>
</section>

<section>
<title>Variable interaction: Worked Examples</title>
<para>Despite the documentation of the different forms of variable definition above, it can be hard to work out what happens when variable operators are combined. This section documents some common questions people have regarding the way variables interact.</para>

<section>
<title>Override and append ordering</title>

<para>There is often confusion about the order in which overrides and the various append operators take effect.</para>

<para><screen><varname>OVERRIDES</varname> = "foo"
<varname>A_foo_append</varname> = "X"</screen></para>
<para>In this case, X is unconditionally appended to the variable <varname>A_foo</varname>. Since foo is an override, A_foo would then replace <varname>A</varname>.</para>

<para><screen><varname>OVERRIDES</varname> = "foo"
<varname>A</varname> = "X"
<varname>A_append_foo</varname> = "Y"</screen></para>
<para>In this case, Y is appended to the variable <varname>A</varname> only when foo is in OVERRIDES, so the value of <varname>A</varname> would become XY (NB: no spaces are appended).</para>

<para><screen><varname>OVERRIDES</varname> = "foo"
<varname>A_foo_append</varname> = "X"
<varname>A_foo_append</varname> += "Y"</screen></para>
<para>This behaves as per the first case above, but the value of <varname>A</varname> would be "X Y" instead of just "X".</para>

<para><screen><varname>A</varname> = "1"
<varname>A_append</varname> = "2"
<varname>A_append</varname> = "3"
<varname>A</varname> += "4"
<varname>A</varname> .= "5"</screen></para>

<para>This would ultimately result in <varname>A</varname> taking the value "1 4523", since the _append operator executes at the same time as the expansion of other overrides.</para>

</section>
<section>
<title>Key Expansion</title>

<para>Key expansion happens at data store finalisation time, just before overrides are expanded.</para>

<para><screen><varname>A${B}</varname> = "X"
<varname>B</varname> = "2"
<varname>A2</varname> = "Y"</screen></para>
<para>So in this case <varname>A2</varname> would take the value of "X".</para>
</section>

</section>
<section>
<title>Dependency handling</title>
<para>BitBake 1.7.x onwards works with the metadata at the task level, since this is optimal when dealing with multiple threads of execution. A robust method of specifying task dependencies is therefore needed.</para>
<section>
<title>Dependencies internal to the .bb file</title>
<para>Where the dependencies are internal to a given .bb file, the dependencies are handled by the previously detailed addtask directive.</para>
</section>

<section>
<title>DEPENDS</title>
<para>DEPENDS lists build time dependencies. The 'deptask' flag for tasks is used to signify the task of each item listed in DEPENDS which must have completed before that task can be executed.</para>
<para><screen>do_configure[deptask] = "do_populate_staging"</screen></para>
<para>means the do_populate_staging task of each item in DEPENDS must have completed before do_configure can execute.</para>
</section>
<section>
<title>RDEPENDS</title>
<para>RDEPENDS lists runtime dependencies. The 'rdeptask' flag for tasks is used to signify the task of each item listed in RDEPENDS which must have completed before that task can be executed.</para>
<para><screen>do_package_write[rdeptask] = "do_package"</screen></para>
<para>means the do_package task of each item in RDEPENDS must have completed before do_package_write can execute.</para>
</section>
<section>
<title>Recursive DEPENDS</title>
<para>These are specified with the 'recdeptask' flag, which is used to signify the task(s) of each item in DEPENDS which must have completed before that task can be executed. It applies recursively, so the DEPENDS of each item in the original DEPENDS must be met, and so on.</para>
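<para>A minimal sketch (the task names here are illustrative only, not a prescribed usage):</para>
<para><screen>do_mytask[recdeptask] = "do_populate_staging"</screen></para>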
</section>
<section>
<title>Recursive RDEPENDS</title>
<para>These are specified with the 'recrdeptask' flag, which is used to signify the task(s) of each item in RDEPENDS which must have completed before that task can be executed. It applies recursively, so the RDEPENDS of each item in the original RDEPENDS must be met, and so on. It also runs all DEPENDS first.</para>
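<para>A corresponding sketch for the runtime case (again, the task names are illustrative only):</para>
<para><screen>do_mytask[recrdeptask] = "do_package"</screen></para>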
</section>
<section>
<title>Inter task</title>
<para>The 'depends' flag for tasks is a more generic form which allows an interdependency on specific tasks, rather than specifying the data in DEPENDS or RDEPENDS.</para>
<para><screen>do_patch[depends] = "quilt-native:do_populate_staging"</screen></para>
<para>means the do_populate_staging task of the target quilt-native must have completed before the do_patch task can execute.</para>
</section>
</section>

<section>
<title>Parsing</title>
<section>
<title>Configuration files</title>
<para>The first kind of metadata in BitBake is configuration metadata. This metadata is global, and therefore affects <emphasis>all</emphasis> packages and tasks which are executed.</para>
<para>BitBake will first search the current working directory for an optional "conf/bblayers.conf" configuration file. This file is expected to contain a BBLAYERS variable which is a space delimited list of 'layer' directories. For each directory in this list, a "conf/layer.conf" file will be searched for and parsed, with the LAYERDIR variable being set to the directory where the layer was found. The idea is that these files will set up BBPATH and other variables correctly for a given build directory automatically for the user.</para>
<para>BitBake will then expect to find 'conf/bitbake.conf' somewhere in the user-specified <envar>BBPATH</envar>. That configuration file generally has include directives to pull in any other metadata (generally files specific to architecture, machine, <emphasis>local</emphasis> and so on).</para>
<para>Only variable definitions and include directives are allowed in .conf files.</para>
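<para>As a minimal sketch of this layout (the paths and layer names are hypothetical), a conf/bblayers.conf and a layer's conf/layer.conf might contain something like:</para>
<para><screen># conf/bblayers.conf
BBLAYERS = "/path/to/layers/meta-one /path/to/layers/meta-two"

# meta-one/conf/layer.conf
BBPATH .= ":${LAYERDIR}"
BBFILES += "${LAYERDIR}/recipes-*/*/*.bb"</screen></para>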
</section>
<section>
<title>Classes</title>
<para>BitBake classes are our rudimentary inheritance mechanism. As briefly mentioned in the metadata introduction, they're parsed when an <literal>inherit</literal> directive is encountered, and they are located in classes/ relative to the directories in <envar>BBPATH</envar>.</para>
</section>
<section>
<title>.bb files</title>
<para>A BitBake (.bb) file is a logical unit of tasks to be executed. Normally this is a package to be built. Inter-.bb dependencies are obeyed. The files themselves are located via the <varname>BBFILES</varname> variable, which is set to a space separated list of .bb files, and does handle wildcards.</para>
</section>
</section>
</chapter>

<chapter>
<title>File download support</title>
<section>
<title>Overview</title>
<para>BitBake provides support for downloading files. This procedure is called fetching and it is handled by the fetch and fetch2 modules. At this point the original fetch code is considered to have been replaced by fetch2, and this manual relates only to the fetch2 codebase.</para>

<para>The SRC_URI is normally used to tell BitBake which files to fetch. The next sections will describe the available fetchers and their options. Each fetcher honors a set of variables and per-URI parameters, separated by a <quote>;</quote>, consisting of a key and a value. The semantics of the variables and parameters are defined by the fetcher. BitBake tries to have consistent semantics between the different fetchers.
</para>

<para>The overall fetch process is that first, fetches are attempted from PREMIRRORS. If those don't work, the original SRC_URI is attempted, and if that fails, BitBake will fall back to MIRRORS. Cross URLs are supported, so it is possible to mirror a git repository on an HTTP server as a tarball, for example. Some commonly used example mirror definitions are:</para>

<para><screen><varname>PREMIRRORS</varname> ?= "\
bzr://.*/.*   http://somemirror.org/sources/ \n \
cvs://.*/.*   http://somemirror.org/sources/ \n \
git://.*/.*   http://somemirror.org/sources/ \n \
hg://.*/.*    http://somemirror.org/sources/ \n \
osc://.*/.*   http://somemirror.org/sources/ \n \
p4://.*/.*    http://somemirror.org/sources/ \n \
svk://.*/.*   http://somemirror.org/sources/ \n \
svn://.*/.*   http://somemirror.org/sources/ \n"

<varname>MIRRORS</varname> =+ "\
ftp://.*/.*   http://somemirror.org/sources/ \n \
http://.*/.*  http://somemirror.org/sources/ \n \
https://.*/.* http://somemirror.org/sources/ \n"</screen></para>

<para>Non-local downloaded output is placed into the directory specified by <varname>DL_DIR</varname>. For non-local downloads the code can check checksums for the download to ensure the file has been downloaded correctly. These are specified in the form <varname>SRC_URI[md5sum]</varname> for the md5 checksum and <varname>SRC_URI[sha256sum]</varname> for the sha256 checksum. If <varname>BB_STRICT_CHECKSUM</varname> is set, any download without a checksum will trigger an error message. In cases where multiple files are listed in SRC_URI, the name parameter is used to assign names to the URLs and these are then specified in the checksums in the form SRC_URI[name.sha256sum].</para>
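<para>As a brief sketch of this (the URLs and checksum values below are placeholders, not real files), two named entries with their checksums could look like:</para>
<para><screen>SRC_URI = "http://example.com/foo-1.0.tar.gz;name=foo \
           http://example.com/bar-1.0.tar.gz;name=bar"
SRC_URI[foo.sha256sum] = "0123abcd...placeholder..."
SRC_URI[bar.sha256sum] = "4567ef01...placeholder..."</screen></para>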
</section>

<section>
<title>Local file fetcher</title>
<para>The URN for the local file fetcher is <emphasis>file</emphasis>. The filename can be either absolute or relative. If the filename is relative, <varname>FILESPATH</varname> and, failing that, <varname>FILESDIR</varname> will be used to find the appropriate relative file. The metadata usually extends these variables to include variations of the values in <varname>OVERRIDES</varname>. Single files and complete directories can be specified.
<screen><varname>SRC_URI</varname> = "file://relativefile.patch"
<varname>SRC_URI</varname> = "file://relativefile.patch;this=ignored"
<varname>SRC_URI</varname> = "file:///Users/ich/very_important_software"
</screen>
</para>
</section>

<section>
<title>CVS fetcher</title>
<para>The URN for the CVS fetcher is <emphasis>cvs</emphasis>. This fetcher honors the variables <varname>CVSDIR</varname>, <varname>SRCDATE</varname>, <varname>FETCHCOMMAND_cvs</varname>, <varname>UPDATECOMMAND_cvs</varname>. <varname>DL_DIR</varname> specifies where a temporary checkout is saved. <varname>SRCDATE</varname> specifies which date to use when doing the fetching (the special value of "now" will cause the checkout to be updated on every build). <varname>FETCHCOMMAND</varname> and <varname>UPDATECOMMAND</varname> specify which executables to use for the CVS checkout or update.
</para>
<para>The supported parameters are <varname>module</varname>, <varname>tag</varname>, <varname>date</varname>, <varname>method</varname>, <varname>localdir</varname>, <varname>rsh</varname> and <varname>scmdata</varname>. The <varname>module</varname> specifies which module to check out; the <varname>tag</varname> describes which CVS TAG should be used for the checkout. By default the TAG is empty. A <varname>date</varname> can be specified to override the SRCDATE of the configuration to checkout a specific date. The special value of "now" will cause the checkout to be updated on every build. <varname>method</varname> is by default <emphasis>pserver</emphasis>. If <emphasis>ext</emphasis> is used the <varname>rsh</varname> parameter will be evaluated and <varname>CVS_RSH</varname> will be set. Finally, <varname>localdir</varname> is used to checkout into a special directory relative to <varname>CVSDIR</varname>.

<screen><varname>SRC_URI</varname> = "cvs://CVSROOT;module=mymodule;tag=some-version;method=ext"
<varname>SRC_URI</varname> = "cvs://CVSROOT;module=mymodule;date=20060126;localdir=usethat"
</screen>
</para>
</section>

<section>
<title>HTTP/FTP fetcher</title>
<para>The URNs for the HTTP/FTP fetcher are <emphasis>http</emphasis>, <emphasis>https</emphasis> and <emphasis>ftp</emphasis>. This fetcher honors the variable <varname>FETCHCOMMAND_wget</varname>. <varname>FETCHCOMMAND</varname> contains the command used for fetching. <quote>${URI}</quote> and <quote>${FILES}</quote> will be replaced by the URI and basename of the file to be fetched.
</para>
<para><screen><varname>SRC_URI</varname> = "http://oe.handhelds.org/not_there.aac"
<varname>SRC_URI</varname> = "ftp://oe.handhelds.org/not_there_as_well.aac"
<varname>SRC_URI</varname> = "ftp://you@oe.handhelds.org/home/you/secret.plan"
</screen></para>
</section>

<section>
<title>SVN fetcher</title>
<para>The URN for the SVN fetcher is <emphasis>svn</emphasis>.
</para>
<para>This fetcher honors the variables <varname>FETCHCOMMAND_svn</varname>, <varname>SVNDIR</varname>, <varname>SRCREV</varname>. <varname>FETCHCOMMAND</varname> contains the subversion command. <varname>SRCREV</varname> specifies which revision to use when doing the fetching.
</para>
<para>The supported parameters are <varname>proto</varname>, <varname>rev</varname> and <varname>scmdata</varname>. <varname>proto</varname> is the Subversion protocol, <varname>rev</varname> is the Subversion revision. If <varname>scmdata</varname> is set to <quote>keep</quote>, the <quote>.svn</quote> directories will be available during compile-time.
</para>
<para><screen><varname>SRC_URI</varname> = "svn://svn.oe.handhelds.org/svn;module=vip;proto=http;rev=667"
<varname>SRC_URI</varname> = "svn://svn.oe.handhelds.org/svn/;module=opie;proto=svn+ssh;date=20060126"
</screen></para>
</section>

<section>
<title>GIT fetcher</title>
<para>The URN for the GIT Fetcher is <emphasis>git</emphasis>.
</para>
<para>The variable <varname>GITDIR</varname> will be used as the base directory where the git tree is cloned to.
</para>
<para>The parameters are <emphasis>tag</emphasis>, <emphasis>protocol</emphasis> and <emphasis>scmdata</emphasis>. <emphasis>tag</emphasis> is a Git tag; the default is <quote>master</quote>. <emphasis>protocol</emphasis> is the Git protocol to use and defaults to <quote>git</quote> if a hostname is set, otherwise it is <quote>file</quote>. If <emphasis>scmdata</emphasis> is set to <quote>keep</quote>, the <quote>.git</quote> directory will be available during compile-time.
</para>
<para><screen><varname>SRC_URI</varname> = "git://git.oe.handhelds.org/git/vip.git;tag=version-1"
<varname>SRC_URI</varname> = "git://git.oe.handhelds.org/git/vip.git;protocol=http"
</screen></para>
</section>

</chapter>


<chapter>
<title>The BitBake command</title>
<section>
<title>Introduction</title>
<para>bitbake is the primary command in the system. It facilitates executing tasks in a single .bb file, or executing a given task on a set of multiple .bb files, accounting for interdependencies amongst them.</para>
</section>
<section>
<title>Usage and syntax</title>
<para>
<screen><prompt>$ </prompt>bitbake --help
usage: bitbake [options] [package ...]

Executes the specified task (default is 'build') for a given set of BitBake files.
It expects that BBFILES is defined, which is a space separated list of files to
be executed. BBFILES does support wildcards.
Default BBFILES are the .bb files in the current directory.

options:
  --version             show program's version number and exit
  -h, --help            show this help message and exit
  -b BUILDFILE, --buildfile=BUILDFILE
                        execute the task against this .bb file, rather than a
                        package from BBFILES.
  -k, --continue        continue as much as possible after an error. While the
                        target that failed, and those that depend on it,
                        cannot be remade, the other dependencies of these
                        targets can be processed all the same.
  -f, --force           force run of specified cmd, regardless of stamp status
  -i, --interactive     drop into the interactive mode also called the BitBake
                        shell.
  -c CMD, --cmd=CMD     Specify task to execute. Note that this only executes
                        the specified task for the providee and the packages
                        it depends on, i.e. 'compile' does not implicitly call
                        stage for the dependencies (IOW: use only if you know
                        what you are doing). Depending on the base.bbclass a
                        listtasks task is defined and will show available
                        tasks
  -r FILE, --read=FILE  read the specified file before bitbake.conf
  -v, --verbose         output more chit-chat to the terminal
  -D, --debug           Increase the debug level. You can specify this more
                        than once.
  -n, --dry-run         don't execute, just go through the motions
  -p, --parse-only      quit after parsing the BB files (developers only)
  -s, --show-versions   show current and preferred versions of all packages
  -e, --environment     show the global or per-package environment (this is
                        what used to be bbread)
  -g, --graphviz        emit the dependency trees of the specified packages in
                        the dot syntax
  -I IGNORED_DOT_DEPS, --ignore-deps=IGNORED_DOT_DEPS
                        Stop processing at the given list of dependencies when
                        generating dependency graphs. This can help to make
                        the graph more appealing
  -l DEBUG_DOMAINS, --log-domains=DEBUG_DOMAINS
                        Show debug logging for the specified logging domains
  -P, --profile         profile the command and print a report
</screen>
</para>
<para>
<example>
<title>Executing a task against a single .bb</title>
<para>Executing tasks for a single file is relatively simple. You specify the file in question, and BitBake parses it and executes the specified task (or <quote>build</quote> by default). It obeys intertask dependencies when doing so.</para>
<para><quote>clean</quote> task:</para>
<para><screen><prompt>$ </prompt>bitbake -b blah_1.0.bb -c clean</screen></para>
<para><quote>build</quote> task:</para>
<para><screen><prompt>$ </prompt>bitbake -b blah_1.0.bb</screen></para>
</example>
</para>
<para>
<example>
<title>Executing tasks against a set of .bb files</title>
<para>There are a number of additional complexities introduced when one wants to manage multiple .bb files. Clearly there needs to be a way to tell BitBake what files are available, and of those, which we want to execute at this time. There also needs to be a way for each .bb to express its dependencies, both for build time and runtime. There must be a way for the user to express their preferences when multiple .bb's provide the same functionality, or when there are multiple versions of a .bb.</para>
<para>The next section, Metadata, outlines how to specify such things.</para>
<para>Note that the bitbake command, when not using --buildfile, accepts a <varname>PROVIDER</varname>, not a filename or anything else. By default, a .bb generally PROVIDES its packagename, packagename-version, and packagename-version-revision.</para>
<screen><prompt>$ </prompt>bitbake blah</screen>
<screen><prompt>$ </prompt>bitbake blah-1.0</screen>
<screen><prompt>$ </prompt>bitbake blah-1.0-r0</screen>
<screen><prompt>$ </prompt>bitbake -c clean blah</screen>
<screen><prompt>$ </prompt>bitbake virtual/whatever</screen>
<screen><prompt>$ </prompt>bitbake -c clean virtual/whatever</screen>
</example>
<example>
<title>Generating dependency graphs</title>
<para>BitBake is able to generate dependency graphs using the dot syntax. These graphs can be converted
to images using the <application>dot</application> application from <ulink url="http://www.graphviz.org">Graphviz</ulink>.
Two files will be written into the current working directory: <emphasis>depends.dot</emphasis>, containing dependency information at the package level, and <emphasis>task-depends.dot</emphasis>, containing a breakdown of the dependencies at the task level. To stop depending on common depends, one can use <prompt>-I depend</prompt> to omit these from the graph. This can lead to more readable graphs. This way, <varname>DEPENDS</varname> from inherited classes such as base.bbclass can be removed from the graph.</para>
<screen><prompt>$ </prompt>bitbake -g blah</screen>
<screen><prompt>$ </prompt>bitbake -g -I virtual/whatever -I bloom blah</screen>
</example>
</para>
</section>
<section>
<title>Special variables</title>
<para>Certain variables affect BitBake operation:</para>
<section>
<title><varname>BB_NUMBER_THREADS</varname></title>
<para>The number of threads BitBake should run at once (default: 1).</para>
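<para>For example, to allow up to four tasks to run at once, one could set the following in a configuration file (the value shown is just an illustration):</para>
<para><screen>BB_NUMBER_THREADS = "4"</screen></para>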
</section>
</section>
<section>
<title>Metadata</title>
<para>As you may have seen in the usage information, or in the information about .bb files, the <varname>BBFILES</varname> variable is how the BitBake tool locates its files. This variable is a space separated list of files that are available, and supports wildcards.
<example>
<title>Setting BBFILES</title>
<programlisting><varname>BBFILES</varname> = "/path/to/bbfiles/*.bb"</programlisting>
</example></para>
<para>With regard to dependencies, it expects the .bb to define a <varname>DEPENDS</varname> variable, which contains a space separated list of <quote>package names</quote>, which themselves are the <varname>PN</varname> variable. The <varname>PN</varname> variable is, in general, set to a component of the .bb filename by default.</para>
<example>
<title>Depending on another .bb</title>
<para>a.bb:
<screen>PN = "package-a"
DEPENDS += "package-b"</screen>
</para>
<para>b.bb:
<screen>PN = "package-b"</screen>
</para>
</example>
<example>
<title>Using PROVIDES</title>
<para>This example shows the usage of the <varname>PROVIDES</varname> variable, which allows a given .bb to specify what functionality it provides.</para>
<para>package1.bb:
<screen>PROVIDES += "virtual/package"</screen>
</para>
<para>package2.bb:
<screen>DEPENDS += "virtual/package"</screen>
</para>
<para>package3.bb:
<screen>PROVIDES += "virtual/package"</screen>
</para>
<para>As you can see, we have two different .bb's that provide the same functionality (virtual/package). Clearly, there needs to be a way for the person running BitBake to control which of those providers gets used. There is, indeed, such a way.</para>
<para>The following would go into a .conf file, to select package1:
<screen>PREFERRED_PROVIDER_virtual/package = "package1"</screen>
</para>
</example>
<example>
<title>Specifying version preference</title>
<para>When there are multiple <quote>versions</quote> of a given package, BitBake defaults to selecting the most recent version, unless otherwise specified. If the .bb in question has a <varname>DEFAULT_PREFERENCE</varname> set lower than the other .bb's (default is 0), then it will not be selected. This allows the person or persons maintaining the repository of .bb files to specify their preference for the default selected version. In addition, the user can specify their preferred version.</para>
<para>If the first .bb is named <filename>a_1.1.bb</filename>, then the <varname>PN</varname> variable will be set to <quote>a</quote>, and the <varname>PV</varname> variable will be set to 1.1.</para>
<para>If we then have an <filename>a_1.2.bb</filename>, BitBake will choose 1.2 by default. However, if we define the following variable in a .conf that BitBake parses, we can change that.
<screen>PREFERRED_VERSION_a = "1.1"</screen>
</para>
</example>
<example>
<title>Using <quote>bbfile collections</quote></title>
<para>bbfile collections exist to allow the user to have multiple repositories of bbfiles that contain exactly the same packages. For example, one could easily use them to make one's own local copy of an upstream repository, but with custom modifications that one does not want upstream. Usage:</para>
<screen>BBFILES = "/stuff/openembedded/*/*.bb /stuff/openembedded.modified/*/*.bb"
BBFILE_COLLECTIONS = "upstream local"
BBFILE_PATTERN_upstream = "^/stuff/openembedded/"
BBFILE_PATTERN_local = "^/stuff/openembedded.modified/"
BBFILE_PRIORITY_upstream = "5"
BBFILE_PRIORITY_local = "10"</screen>
</example>
</section>
</chapter>
</book>
@ -1,59 +0,0 @@
<!ENTITY DISTRO "1.4">
<!ENTITY DISTRO_NAME "tbd">
<!ENTITY YOCTO_DOC_VERSION "1.4">
<!ENTITY POKYVERSION "8.0">
<!ENTITY YOCTO_POKY "poky-&DISTRO_NAME;-&POKYVERSION;">
<!ENTITY COPYRIGHT_YEAR "2010-2013">
<!ENTITY YOCTO_DL_URL "http://downloads.yoctoproject.org">
<!ENTITY YOCTO_HOME_URL "http://www.yoctoproject.org">
<!ENTITY YOCTO_LISTS_URL "http://lists.yoctoproject.org">
<!ENTITY YOCTO_BUGZILLA_URL "http://bugzilla.yoctoproject.org">
<!ENTITY YOCTO_WIKI_URL "https://wiki.yoctoproject.org">
<!ENTITY YOCTO_AB_URL "http://autobuilder.yoctoproject.org">
<!ENTITY YOCTO_GIT_URL "http://git.yoctoproject.org">
<!ENTITY YOCTO_ADTREPO_URL "http://adtrepo.yoctoproject.org">
<!ENTITY OE_HOME_URL "http://www.openembedded.org">
<!ENTITY OE_LISTS_URL "http://lists.linuxtogo.org/cgi-bin/mailman">
<!ENTITY OE_DOCS_URL "http://docs.openembedded.org">
<!ENTITY OH_HOME_URL "http://o-hand.com">
<!ENTITY BITBAKE_HOME_URL "http://developer.berlios.de/projects/bitbake/">
<!ENTITY ECLIPSE_MAIN_URL "http://www.eclipse.org/downloads">
<!ENTITY ECLIPSE_DL_URL "http://download.eclipse.org">
<!ENTITY ECLIPSE_DL_PLUGIN_URL "&YOCTO_DL_URL;/releases/eclipse-plugin/&DISTRO;">
<!ENTITY ECLIPSE_UPDATES_URL "&ECLIPSE_DL_URL;/tm/updates/3.3">
<!ENTITY ECLIPSE_INDIGO_URL "&ECLIPSE_DL_URL;/releases/indigo">
<!ENTITY ECLIPSE_JUNO_URL "&ECLIPSE_DL_URL;/releases/juno">
<!ENTITY ECLIPSE_INDIGO_CDT_URL "&ECLIPSE_DL_URL;tools/cdt/releases/indigo">
<!ENTITY YOCTO_DOCS_URL "&YOCTO_HOME_URL;/docs">
<!ENTITY YOCTO_SOURCES_URL "&YOCTO_HOME_URL;/sources/">
<!ENTITY YOCTO_AB_PORT_URL "&YOCTO_AB_URL;:8010">
<!ENTITY YOCTO_AB_NIGHTLY_URL "&YOCTO_AB_URL;/nightly/">
<!ENTITY YOCTO_POKY_URL "&YOCTO_DL_URL;/releases/poky/">
<!ENTITY YOCTO_RELEASE_DL_URL "&YOCTO_DL_URL;/releases/yocto/yocto-&DISTRO;">
<!ENTITY YOCTO_TOOLCHAIN_DL_URL "&YOCTO_RELEASE_DL_URL;/toolchain/">
<!ENTITY YOCTO_ECLIPSE_DL_URL "&YOCTO_RELEASE_DL_URL;/eclipse-plugin/indigo;">
<!ENTITY YOCTO_ADTINSTALLER_DL_URL "&YOCTO_RELEASE_DL_URL;/adt_installer">
<!ENTITY YOCTO_POKY_DL_URL "&YOCTO_RELEASE_DL_URL;/&YOCTO_POKY;.tar.bz2">
<!ENTITY YOCTO_MACHINES_DL_URL "&YOCTO_RELEASE_DL_URL;/machines">
<!ENTITY YOCTO_QEMU_DL_URL "&YOCTO_MACHINES_DL_URL;/qemu">
<!ENTITY YOCTO_PYTHON-i686_DL_URL "&YOCTO_DL_URL;/releases/miscsupport/python-nativesdk-standalone-i686.tar.bz2">
<!ENTITY YOCTO_PYTHON-x86_64_DL_URL "&YOCTO_DL_URL;/releases/miscsupport/python-nativesdk-standalone-x86_64.tar.bz2">
<!ENTITY YOCTO_DOCS_QS_URL "&YOCTO_DOCS_URL;/&YOCTO_DOC_VERSION;/yocto-project-qs/yocto-project-qs.html">
<!ENTITY YOCTO_DOCS_ADT_URL "&YOCTO_DOCS_URL;/&YOCTO_DOC_VERSION;/adt-manual/adt-manual.html">
<!ENTITY YOCTO_DOCS_REF_URL "&YOCTO_DOCS_URL;/&YOCTO_DOC_VERSION;/ref-manual/ref-manual.html">
<!ENTITY YOCTO_DOCS_BSP_URL "&YOCTO_DOCS_URL;/&YOCTO_DOC_VERSION;/bsp-guide/bsp-guide.html">
<!ENTITY YOCTO_DOCS_DEV_URL "&YOCTO_DOCS_URL;/&YOCTO_DOC_VERSION;/dev-manual/dev-manual.html">
<!ENTITY YOCTO_DOCS_KERNEL_URL "&YOCTO_DOCS_URL;/&YOCTO_DOC_VERSION;/kernel-manual/kernel-manual.html">
<!ENTITY YOCTO_ADTPATH_DIR "/opt/poky/&DISTRO;">
<!ENTITY YOCTO_POKY_TARBALL "&YOCTO_POKY;.tar.bz2">
<!ENTITY OE_INIT_PATH "&YOCTO_POKY;/oe-init-build-env">
<!ENTITY OE_INIT_FILE "oe-init-build-env">
<!ENTITY UBUNTU_HOST_PACKAGES_ESSENTIAL "gawk wget git-core diffstat unzip texinfo \
     build-essential chrpath">
<!ENTITY FEDORA_HOST_PACKAGES_ESSENTIAL "gawk make wget tar bzip2 gzip python unzip perl patch \
     diffutils diffstat git cpp gcc gcc-c++ eglibc-devel texinfo chrpath \
     ccache">
<!ENTITY OPENSUSE_HOST_PACKAGES_ESSENTIAL "python gcc gcc-c++ git chrpath make wget python-xml \
     diffstat texinfo python-curses">
<!ENTITY CENTOS_HOST_PACKAGES_ESSENTIAL "gawk make wget tar bzip2 gzip python unzip perl patch \
     diffutils diffstat git cpp gcc gcc-c++ glibc-devel texinfo chrpath">
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
@ -1,39 +0,0 @@
<xsl:stylesheet version="1.0"
    xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
    xmlns:d="http://docbook.org/ns/docbook"
    xmlns="http://www.w3.org/1999/xhtml"
    exclude-result-prefixes="d">

  <xsl:template name="component.title">
    <xsl:param name="node" select="."/>

    <xsl:variable name="level">
      <xsl:choose>
        <xsl:when test="ancestor::d:section">
          <xsl:value-of select="count(ancestor::d:section)+1"/>
        </xsl:when>
        <xsl:when test="ancestor::d:sect5">6</xsl:when>
        <xsl:when test="ancestor::d:sect4">5</xsl:when>
        <xsl:when test="ancestor::d:sect3">4</xsl:when>
        <xsl:when test="ancestor::d:sect2">3</xsl:when>
        <xsl:when test="ancestor::d:sect1">2</xsl:when>
        <xsl:otherwise>1</xsl:otherwise>
      </xsl:choose>
    </xsl:variable>
    <xsl:element name="h{$level+1}" namespace="http://www.w3.org/1999/xhtml">
      <xsl:attribute name="class">title</xsl:attribute>
      <xsl:if test="$generate.id.attributes = 0">
        <xsl:call-template name="anchor">
          <xsl:with-param name="node" select="$node"/>
          <xsl:with-param name="conditional" select="0"/>
        </xsl:call-template>
      </xsl:if>
      <xsl:apply-templates select="$node" mode="object.title.markup">
        <xsl:with-param name="allow-anchors" select="1"/>
      </xsl:apply-templates>
      <xsl:call-template name="permalink">
        <xsl:with-param name="node" select="$node"/>
      </xsl:call-template>
    </xsl:element>
  </xsl:template>
</xsl:stylesheet>
@@ -1,64 +0,0 @@
<?xml version='1.0'?>
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns="http://www.w3.org/1999/xhtml" xmlns:fo="http://www.w3.org/1999/XSL/Format" version="1.0">

  <xsl:import href="http://docbook.sourceforge.net/release/xsl/current/fo/docbook.xsl" />

  <!-- check project-plan.sh for how this is generated, needed to tweak
       the cover page
    -->
  <xsl:include href="/tmp/titlepage.xsl"/>

  <!-- To force a page break in document, i.e per section add a
       <?hard-pagebreak?> tag.
    -->
  <xsl:template match="processing-instruction('hard-pagebreak')">
    <fo:block break-before='page' />
  </xsl:template>

  <!--Fix for defualt indent getting TOC all wierd..
      See http://sources.redhat.com/ml/docbook-apps/2005-q1/msg00455.html
      FIXME: must be a better fix
    -->
  <xsl:param name="body.start.indent" select="'0'"/>
  <!--<xsl:param name="title.margin.left" select="'0'"/>-->

  <!-- stop long-ish header titles getting wrapped -->
  <xsl:param name="header.column.widths">1 10 1</xsl:param>

  <!-- customise headers and footers a little -->

  <xsl:template name="head.sep.rule">
    <xsl:if test="$header.rule != 0">
      <xsl:attribute name="border-bottom-width">0.5pt</xsl:attribute>
      <xsl:attribute name="border-bottom-style">solid</xsl:attribute>
      <xsl:attribute name="border-bottom-color">#cccccc</xsl:attribute>
    </xsl:if>
  </xsl:template>

  <xsl:template name="foot.sep.rule">
    <xsl:if test="$footer.rule != 0">
      <xsl:attribute name="border-top-width">0.5pt</xsl:attribute>
      <xsl:attribute name="border-top-style">solid</xsl:attribute>
      <xsl:attribute name="border-top-color">#cccccc</xsl:attribute>
    </xsl:if>
  </xsl:template>

  <xsl:attribute-set name="header.content.properties">
    <xsl:attribute name="color">#cccccc</xsl:attribute>
  </xsl:attribute-set>

  <xsl:attribute-set name="footer.content.properties">
    <xsl:attribute name="color">#cccccc</xsl:attribute>
  </xsl:attribute-set>


  <!-- general settings -->

  <xsl:param name="fop1.extensions" select="1"></xsl:param>
  <xsl:param name="paper.type" select="'A4'"></xsl:param>
  <xsl:param name="section.autolabel" select="1"></xsl:param>
  <xsl:param name="body.font.family" select="'verasans'"></xsl:param>
  <xsl:param name="title.font.family" select="'verasans'"></xsl:param>
  <xsl:param name="monospace.font.family" select="'veramono'"></xsl:param>

</xsl:stylesheet>
@@ -1,25 +0,0 @@
<xsl:stylesheet version="1.0"
        xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
        xmlns:d="http://docbook.org/ns/docbook"
        xmlns="http://www.w3.org/1999/xhtml"
        exclude-result-prefixes="d">

  <xsl:template name="division.title">
    <xsl:param name="node" select="."/>

    <h1>
      <xsl:attribute name="class">title</xsl:attribute>
      <xsl:call-template name="anchor">
        <xsl:with-param name="node" select="$node"/>
        <xsl:with-param name="conditional" select="0"/>
      </xsl:call-template>
      <xsl:apply-templates select="$node" mode="object.title.markup">
        <xsl:with-param name="allow-anchors" select="1"/>
      </xsl:apply-templates>
      <xsl:call-template name="permalink">
        <xsl:with-param name="node" select="$node"/>
      </xsl:call-template>
    </h1>
  </xsl:template>
</xsl:stylesheet>

Binary file not shown.
Before: 24 KiB image (not shown)
@@ -1,58 +0,0 @@
<fop version="1.0">

  <!-- Strict user configuration -->
  <strict-configuration>true</strict-configuration>

  <!-- Strict FO validation -->
  <strict-validation>true</strict-validation>

  <!--
    Set the baseDir so common/openedhand.svg references in plans still
    work ok. Note, relative file references to current dir should still work.
    -->
  <base>../template</base>
  <font-base>../template</font-base>

  <!-- Source resolution in dpi (dots/pixels per inch) for determining the
       size of pixels in SVG and bitmap images, default: 72dpi -->
  <!-- <source-resolution>72</source-resolution> -->
  <!-- Target resolution in dpi (dots/pixels per inch) for specifying the
       target resolution for generated bitmaps, default: 72dpi -->
  <!-- <target-resolution>72</target-resolution> -->

  <!-- default page-height and page-width, in case
       value is specified as auto -->
  <default-page-settings height="11in" width="8.26in"/>

  <!-- <use-cache>false</use-cache> -->

  <renderers>
    <renderer mime="application/pdf">
      <fonts>
        <font metrics-file="VeraMono.xml"
              kerning="yes"
              embed-url="VeraMono.ttf">
          <font-triplet name="veramono" style="normal" weight="normal"/>
        </font>

        <font metrics-file="VeraMoBd.xml"
              kerning="yes"
              embed-url="VeraMoBd.ttf">
          <font-triplet name="veramono" style="normal" weight="bold"/>
        </font>

        <font metrics-file="Vera.xml"
              kerning="yes"
              embed-url="Vera.ttf">
          <font-triplet name="verasans" style="normal" weight="normal"/>
          <font-triplet name="verasans" style="normal" weight="bold"/>
          <font-triplet name="verasans" style="italic" weight="normal"/>
          <font-triplet name="verasans" style="italic" weight="bold"/>
        </font>

        <auto-detect/>
      </fonts>
    </renderer>
  </renderers>
</fop>

@@ -1,21 +0,0 @@
<xsl:stylesheet version="1.0"
        xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
        xmlns:d="http://docbook.org/ns/docbook"
        xmlns="http://www.w3.org/1999/xhtml"
        exclude-result-prefixes="d">

  <xsl:template name="formal.object.heading">
    <xsl:param name="object" select="."/>
    <xsl:param name="title">
      <xsl:apply-templates select="$object" mode="object.title.markup">
        <xsl:with-param name="allow-anchors" select="1"/>
      </xsl:apply-templates>
    </xsl:param>
    <p class="title">
      <b><xsl:copy-of select="$title"/></b>
      <xsl:call-template name="permalink">
        <xsl:with-param name="node" select="$object"/>
      </xsl:call-template>
    </p>
  </xsl:template>
</xsl:stylesheet>
@@ -1,14 +0,0 @@
<xsl:stylesheet version="1.0"
        xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
        xmlns:d="http://docbook.org/ns/docbook"
        xmlns="http://www.w3.org/1999/xhtml">

  <xsl:template match="glossentry/glossterm">
    <xsl:apply-imports/>
    <xsl:if test="$generate.permalink != 0">
      <xsl:call-template name="permalink">
        <xsl:with-param name="node" select=".."/>
      </xsl:call-template>
    </xsl:if>
  </xsl:template>
</xsl:stylesheet>
@@ -1,25 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<xsl:stylesheet version="1.0"
                xmlns="http://www.w3.org/1999/xhtml"
                xmlns:xsl="http://www.w3.org/1999/XSL/Transform">

  <xsl:param name="generate.permalink" select="1"/>
  <xsl:param name="permalink.text">¶</xsl:param>

  <xsl:template name="permalink">
    <xsl:param name="node"/>

    <xsl:if test="$generate.permalink != '0'">
      <span class="permalink">
        <a alt="Permalink" title="Permalink">
          <xsl:attribute name="href">
            <xsl:call-template name="href.target">
              <xsl:with-param name="object" select="$node"/>
            </xsl:call-template>
          </xsl:attribute>
          <xsl:copy-of select="$permalink.text"/>
        </a>
      </span>
    </xsl:if>
  </xsl:template>
</xsl:stylesheet>
@@ -1,55 +0,0 @@
<xsl:stylesheet version="1.0"
        xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
        xmlns:d="http://docbook.org/ns/docbook"
        xmlns="http://www.w3.org/1999/xhtml" exclude-result-prefixes="d">

  <xsl:template name="section.title">
    <xsl:variable name="section"
                  select="(ancestor::section |
                           ancestor::simplesect|
                           ancestor::sect1|
                           ancestor::sect2|
                           ancestor::sect3|
                           ancestor::sect4|
                           ancestor::sect5)[last()]"/>

    <xsl:variable name="renderas">
      <xsl:choose>
        <xsl:when test="$section/@renderas = 'sect1'">1</xsl:when>
        <xsl:when test="$section/@renderas = 'sect2'">2</xsl:when>
        <xsl:when test="$section/@renderas = 'sect3'">3</xsl:when>
        <xsl:when test="$section/@renderas = 'sect4'">4</xsl:when>
        <xsl:when test="$section/@renderas = 'sect5'">5</xsl:when>
        <xsl:otherwise><xsl:value-of select="''"/></xsl:otherwise>
      </xsl:choose>
    </xsl:variable>

    <xsl:variable name="level">
      <xsl:choose>
        <xsl:when test="$renderas != ''">
          <xsl:value-of select="$renderas"/>
        </xsl:when>
        <xsl:otherwise>
          <xsl:call-template name="section.level">
            <xsl:with-param name="node" select="$section"/>
          </xsl:call-template>
        </xsl:otherwise>
      </xsl:choose>
    </xsl:variable>

    <xsl:call-template name="section.heading">
      <xsl:with-param name="section" select="$section"/>
      <xsl:with-param name="level" select="$level"/>
      <xsl:with-param name="title">
        <xsl:apply-templates select="$section" mode="object.title.markup">
          <xsl:with-param name="allow-anchors" select="1"/>
        </xsl:apply-templates>
        <xsl:if test="$level > 0">
          <xsl:call-template name="permalink">
            <xsl:with-param name="node" select="$section"/>
          </xsl:call-template>
        </xsl:if>
      </xsl:with-param>
    </xsl:call-template>
  </xsl:template>
</xsl:stylesheet>
File diff suppressed because it is too large
@@ -1,51 +0,0 @@
#!/bin/sh

if [ -z "$1" -o -z "$2" ]; then
   echo "usage: [-v] $0 <docbook file> <templatedir>"
   echo
   echo "*NOTE* you need xsltproc, fop and nwalsh docbook stylesheets"
   echo "       installed for this to work!"
   echo
   exit 0
fi

FO=`echo $1 | sed s/.xml/.fo/` || exit 1
PDF=`echo $1 | sed s/.xml/.pdf/` || exit 1
TEMPLATEDIR=$2

##
# These URI should be rewritten by your distribution's xml catalog to
# match your localy installed XSL stylesheets.
XSL_BASE_URI="http://docbook.sourceforge.net/release/xsl/current"

# Creates a temporary XSL stylesheet based on titlepage.xsl
xsltproc -o /tmp/titlepage.xsl \
         --xinclude \
         $XSL_BASE_URI/template/titlepage.xsl \
         $TEMPLATEDIR/titlepage.templates.xml || exit 1

# Creates the file needed for FOP
xsltproc --xinclude \
         --stringparam hyphenate false \
         --stringparam formal.title.placement "figure after" \
         --stringparam ulink.show 1 \
         --stringparam body.font.master 9 \
         --stringparam title.font.master 11 \
         --stringparam draft.watermark.image "$TEMPLATEDIR/draft.png" \
         --stringparam chapter.autolabel 1 \
         --stringparam appendix.autolabel A \
         --stringparam section.autolabel 1 \
         --stringparam section.label.includes.component.label 1 \
         --output $FO \
         $TEMPLATEDIR/db-pdf.xsl \
         $1 || exit 1

# Invokes the Java version of FOP. Uses the additional configuration file common/fop-config.xml
fop -c $TEMPLATEDIR/fop-config.xml -fo $FO -pdf $PDF || exit 1

rm -f $FO
rm -f /tmp/titlepage.xsl

echo
echo " #### Success! $PDF ready. ####"
echo
@@ -23,17 +23,19 @@
 # Assign a file to __warn__ to get warnings about slow operations.
 #
 
+from __future__ import print_function
 import copy
 import types
 ImmutableTypes = (
+    types.NoneType,
     bool,
     complex,
     float,
     int,
+    long,
     tuple,
     frozenset,
-    str
+    basestring
 )
 
 MUTABLE = "__mutable__"
@@ -59,7 +61,7 @@ class COWDictMeta(COWMeta):
     __call__ = cow
 
     def __setitem__(cls, key, value):
-        if value is not None and not isinstance(value, ImmutableTypes):
+        if not isinstance(value, ImmutableTypes):
             if not isinstance(value, COWMeta):
                 cls.__hasmutable__ = True
             key += MUTABLE
@@ -114,7 +116,7 @@ class COWDictMeta(COWMeta):
         cls.__setitem__(key, cls.__marker__)
 
     def __revertitem__(cls, key):
-        if key not in cls.__dict__:
+        if not cls.__dict__.has_key(key):
             key += MUTABLE
         delattr(cls, key)
 
@@ -181,7 +183,7 @@ class COWSetMeta(COWDictMeta):
         COWDictMeta.__delitem__(cls, repr(hash(value)))
 
     def __in__(cls, value):
-        return repr(hash(value)) in COWDictMeta
+        return COWDictMeta.has_key(repr(hash(value)))
 
     def iterkeys(cls):
         raise TypeError("sets don't have keys")
@@ -190,10 +192,12 @@ class COWSetMeta(COWDictMeta):
         raise TypeError("sets don't have 'items'")
 
 # These are the actual classes you use!
-class COWDictBase(object, metaclass = COWDictMeta):
+class COWDictBase(object):
+    __metaclass__ = COWDictMeta
     __count__ = 0
 
-class COWSetBase(object, metaclass = COWSetMeta):
+class COWSetBase(object):
+    __metaclass__ = COWSetMeta
     __count__ = 0
 
 if __name__ == "__main__":
@@ -283,7 +287,7 @@ if __name__ == "__main__":
    except KeyError:
        print("Yay! deleted key raises error")
 
-   if 'b' in b:
+   if b.has_key('b'):
        print("Boo!")
    else:
        print("Yay - has_key with delete works!")
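The comment "These are the actual classes you use!" is the only usage hint in this module, so a minimal copy-on-write sketch may help. It follows the module's own __main__ test and assumes the file is importable as bb.COW (the diff viewer does not show the file path):

from bb import COW

base = COW.COWDictBase.copy()    # a fresh copy-on-write "dict"
base['name'] = 'value'

child = base.copy()              # cheap copy; nothing is duplicated yet
child['name'] = 'other'          # the write lands in the child only

print(base['name'])              # -> value
print(child['name'])             # -> other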
@@ -21,23 +21,11 @@
 # with this program; if not, write to the Free Software Foundation, Inc.,
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 
-__version__ = "1.34.0"
+__version__ = "1.13.3"
 
 import sys
-if sys.version_info < (3, 4, 0):
-    raise RuntimeError("Sorry, python 3.4.0 or later is required for this version of bitbake")
+if sys.version_info < (2, 6, 0):
+    raise RuntimeError("Sorry, python 2.6.0 or later is required for this version of bitbake")
 
-
-class BBHandledException(Exception):
-    """
-    The big dilemma for generic bitbake code is what information to give the user
-    when an exception occurs. Any exception inheriting this base exception class
-    has already provided information to the user via some 'fired' message type such as
-    an explicitly fired event using bb.fire, or a bb.error message. If bitbake
-    encounters an exception derived from this class, no backtrace or other information
-    will be given to the user, its assumed the earlier event provided the relevant information.
-    """
-    pass
-
 import os
 import logging
@@ -70,42 +58,47 @@ logger = logging.getLogger("BitBake")
 logger.addHandler(NullHandler())
 logger.setLevel(logging.DEBUG - 2)
 
-mainlogger = logging.getLogger("BitBake.Main")
-
 # This has to be imported after the setLoggerClass, as the import of bb.msg
 # can result in construction of the various loggers.
 import bb.msg
 
-from bb import fetch2 as fetch
-sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
+if "BBDEBUG" in os.environ:
+    level = int(os.environ["BBDEBUG"])
+    if level:
+        bb.msg.set_debug_level(level)
+
+if True or os.environ.get("BBFETCH2"):
+    from bb import fetch2 as fetch
+    sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
 
 # Messaging convenience functions
 def plain(*args):
-    mainlogger.plain(''.join(args))
+    logger.plain(''.join(args))
 
 def debug(lvl, *args):
-    if isinstance(lvl, str):
-        mainlogger.warning("Passed invalid debug level '%s' to bb.debug", lvl)
+    if isinstance(lvl, basestring):
+        logger.warn("Passed invalid debug level '%s' to bb.debug", lvl)
         args = (lvl,) + args
         lvl = 1
-    mainlogger.debug(lvl, ''.join(args))
+    logger.debug(lvl, ''.join(args))
 
 def note(*args):
-    mainlogger.info(''.join(args))
+    logger.info(''.join(args))
 
 def warn(*args):
-    mainlogger.warning(''.join(args))
+    logger.warn(''.join(args))
 
-def error(*args, **kwargs):
-    mainlogger.error(''.join(args), extra=kwargs)
+def error(*args):
+    logger.error(''.join(args))
 
-def fatal(*args, **kwargs):
-    mainlogger.critical(''.join(args), extra=kwargs)
-    raise BBHandledException()
+def fatal(*args):
+    logger.critical(''.join(args))
+    sys.exit(1)
 
 def deprecated(func, name=None, advice=""):
     """This is a decorator which can be used to mark functions
-    as deprecated. It will result in a warning being emitted
+    as deprecated. It will result in a warning being emmitted
     when the function is used."""
     import warnings
 
@@ -142,3 +135,6 @@ def deprecate_import(current, modulename, fromlist, renames = None):
 
         setattr(sys.modules[current], newname, newobj)
 
+deprecate_import(__name__, "bb.fetch", ("MalformedUrl", "encodeurl", "decodeurl"))
+deprecate_import(__name__, "bb.utils", ("mkdirhier", "movefile", "copyfile", "which"))
+deprecate_import(__name__, "bb.utils", ["vercmp_string"], ["vercmp"])
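The deprecated() helper is only represented by its docstring in this hunk. As a rough, hedged sketch (not the module's actual body), a decorator with that contract could look like this:

import functools
import warnings

def deprecated(func, name=None, advice=""):
    # Illustrative sketch only: emit a DeprecationWarning whenever the
    # wrapped function is called, as the docstring above describes.
    name = name or func.__name__

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        warnings.warn("%s is deprecated. %s" % (name, advice),
                      category=DeprecationWarning, stacklevel=2)
        return func(*args, **kwargs)
    return wrapper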
@@ -23,19 +23,16 @@
 # with this program; if not, write to the Free Software Foundation, Inc.,
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 #
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+#Based on functions from the base bb module, Copyright 2003 Holger Schurig
 
 import os
 import sys
 import logging
 import shlex
-import glob
-import time
-import stat
 import bb
 import bb.msg
 import bb.process
-import bb.progress
+from contextlib import nested
 from bb import data, event, utils
 
 bblogger = logging.getLogger('BitBake')
@ -43,44 +40,26 @@ logger = logging.getLogger('BitBake.Build')
|
||||||
|
|
||||||
NULL = open(os.devnull, 'r+')
|
NULL = open(os.devnull, 'r+')
|
||||||
|
|
||||||
__mtime_cache = {}
|
|
||||||
|
|
||||||
def cached_mtime_noerror(f):
|
# When we execute a python function we'd like certain things
|
||||||
if f not in __mtime_cache:
|
# in all namespaces, hence we add them to __builtins__
|
||||||
try:
|
|
||||||
__mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
|
|
||||||
except OSError:
|
|
||||||
return 0
|
|
||||||
return __mtime_cache[f]
|
|
||||||
|
|
||||||
def reset_cache():
|
|
||||||
global __mtime_cache
|
|
||||||
__mtime_cache = {}
|
|
||||||
|
|
||||||
# When we execute a Python function, we'd like certain things
|
|
||||||
# in all namespaces, hence we add them to __builtins__.
|
|
||||||
# If we do not do this and use the exec globals, they will
|
# If we do not do this and use the exec globals, they will
|
||||||
# not be available to subfunctions.
|
# not be available to subfunctions.
|
||||||
if hasattr(__builtins__, '__setitem__'):
|
__builtins__['bb'] = bb
|
||||||
builtins = __builtins__
|
__builtins__['os'] = os
|
||||||
else:
|
|
||||||
builtins = __builtins__.__dict__
|
|
||||||
|
|
||||||
builtins['bb'] = bb
|
|
||||||
builtins['os'] = os
|
|
||||||
|
|
||||||
class FuncFailed(Exception):
|
class FuncFailed(Exception):
|
||||||
def __init__(self, name = None, logfile = None):
|
def __init__(self, name = None, logfile = None):
|
||||||
self.logfile = logfile
|
self.logfile = logfile
|
||||||
self.name = name
|
self.name = name
|
||||||
if name:
|
if name:
|
||||||
self.msg = 'Function failed: %s' % name
|
self.msg = "Function '%s' failed" % name
|
||||||
else:
|
else:
|
||||||
self.msg = "Function failed"
|
self.msg = "Function failed"
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
if self.logfile and os.path.exists(self.logfile):
|
if self.logfile and os.path.exists(self.logfile):
|
||||||
msg = ("%s (log file is located at %s)" %
|
msg = ("%s (see %s for further information)" %
|
||||||
(self.msg, self.logfile))
|
(self.msg, self.logfile))
|
||||||
else:
|
else:
|
||||||
msg = self.msg
|
msg = self.msg
|
||||||
|
@ -89,16 +68,11 @@ class FuncFailed(Exception):
|
||||||
class TaskBase(event.Event):
|
class TaskBase(event.Event):
|
||||||
"""Base class for task events"""
|
"""Base class for task events"""
|
||||||
|
|
||||||
def __init__(self, t, logfile, d):
|
def __init__(self, t, d ):
|
||||||
self._task = t
|
self._task = t
|
||||||
self._package = d.getVar("PF")
|
self._package = bb.data.getVar("PF", d, 1)
|
||||||
self._mc = d.getVar("BB_CURRENT_MC")
|
|
||||||
self.taskfile = d.getVar("FILE")
|
|
||||||
self.taskname = self._task
|
|
||||||
self.logfile = logfile
|
|
||||||
self.time = time.time()
|
|
||||||
event.Event.__init__(self)
|
event.Event.__init__(self)
|
||||||
self._message = "recipe %s: task %s: %s" % (d.getVar("PF"), t, self.getDisplayName())
|
self._message = "package %s: task %s: %s" % (bb.data.getVar("PF", d, 1), t, bb.event.getName(self)[4:])
|
||||||
|
|
||||||
def getTask(self):
|
def getTask(self):
|
||||||
return self._task
|
return self._task
|
||||||
|
@ -106,16 +80,10 @@ class TaskBase(event.Event):
|
||||||
def setTask(self, task):
|
def setTask(self, task):
|
||||||
self._task = task
|
self._task = task
|
||||||
|
|
||||||
def getDisplayName(self):
|
|
||||||
return bb.event.getName(self)[4:]
|
|
||||||
|
|
||||||
task = property(getTask, setTask, None, "task property")
|
task = property(getTask, setTask, None, "task property")
|
||||||
|
|
||||||
class TaskStarted(TaskBase):
|
class TaskStarted(TaskBase):
|
||||||
"""Task execution started"""
|
"""Task execution started"""
|
||||||
def __init__(self, t, logfile, taskflags, d):
|
|
||||||
super(TaskStarted, self).__init__(t, logfile, d)
|
|
||||||
self.taskflags = taskflags
|
|
||||||
|
|
||||||
class TaskSucceeded(TaskBase):
|
class TaskSucceeded(TaskBase):
|
||||||
"""Task execution completed"""
|
"""Task execution completed"""
|
||||||
|
@ -123,41 +91,16 @@ class TaskSucceeded(TaskBase):
|
||||||
class TaskFailed(TaskBase):
|
class TaskFailed(TaskBase):
|
||||||
"""Task execution failed"""
|
"""Task execution failed"""
|
||||||
|
|
||||||
def __init__(self, task, logfile, metadata, errprinted = False):
|
def __init__(self, task, logfile, metadata):
|
||||||
self.errprinted = errprinted
|
self.logfile = logfile
|
||||||
super(TaskFailed, self).__init__(task, logfile, metadata)
|
super(TaskFailed, self).__init__(task, metadata)
|
||||||
|
|
||||||
class TaskFailedSilent(TaskBase):
|
|
||||||
"""Task execution failed (silently)"""
|
|
||||||
def getDisplayName(self):
|
|
||||||
# Don't need to tell the user it was silent
|
|
||||||
return "Failed"
|
|
||||||
|
|
||||||
class TaskInvalid(TaskBase):
|
class TaskInvalid(TaskBase):
|
||||||
|
|
||||||
def __init__(self, task, metadata):
|
def __init__(self, task, metadata):
|
||||||
super(TaskInvalid, self).__init__(task, None, metadata)
|
super(TaskInvalid, self).__init__(task, metadata)
|
||||||
self._message = "No such task '%s'" % task
|
self._message = "No such task '%s'" % task
|
||||||
|
|
||||||
class TaskProgress(event.Event):
|
|
||||||
"""
|
|
||||||
Task made some progress that could be reported to the user, usually in
|
|
||||||
the form of a progress bar or similar.
|
|
||||||
NOTE: this class does not inherit from TaskBase since it doesn't need
|
|
||||||
to - it's fired within the task context itself, so we don't have any of
|
|
||||||
the context information that you do in the case of the other events.
|
|
||||||
The event PID can be used to determine which task it came from.
|
|
||||||
The progress value is normally 0-100, but can also be negative
|
|
||||||
indicating that progress has been made but we aren't able to determine
|
|
||||||
how much.
|
|
||||||
The rate is optional, this is simply an extra string to display to the
|
|
||||||
user if specified.
|
|
||||||
"""
|
|
||||||
def __init__(self, progress, rate=None):
|
|
||||||
self.progress = progress
|
|
||||||
self.rate = rate
|
|
||||||
event.Event.__init__(self)
|
|
||||||
|
|
||||||
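Per the TaskProgress docstring above, the event is fired from inside the task itself. A hedged sketch of a reporter follows; the helper name and the byte counters are assumptions, while TaskProgress(progress, rate) and event.fire(event, d) come from the code shown here:

import bb.build
import bb.event

def report_progress(d, done_bytes, total_bytes, rate=None):
    # progress is normally 0-100; negative means progress of unknown size
    if total_bytes:
        progress = int(done_bytes * 100 / total_bytes)
    else:
        progress = -1
    bb.event.fire(bb.build.TaskProgress(progress, rate), d)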
|
|
||||||
class LogTee(object):
|
class LogTee(object):
|
||||||
def __init__(self, logger, outfile):
|
def __init__(self, logger, outfile):
|
||||||
|
@ -178,166 +121,95 @@ class LogTee(object):
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return '<LogTee {0}>'.format(self.name)
|
return '<LogTee {0}>'.format(self.name)
|
||||||
def flush(self):
|
|
||||||
self.outfile.flush()
|
|
||||||
|
|
||||||
#
|
|
||||||
# pythonexception allows the python exceptions generated to be raised
|
|
||||||
# as the real exceptions (not FuncFailed) and without a backtrace at the
|
|
||||||
# origin of the failure.
|
|
||||||
#
|
|
||||||
def exec_func(func, d, dirs = None, pythonexception=False):
|
|
||||||
"""Execute a BB 'function'"""
|
|
||||||
|
|
||||||
try:
|
def exec_func(func, d, dirs = None):
|
||||||
oldcwd = os.getcwd()
|
"""Execute an BB 'function'"""
|
||||||
except:
|
|
||||||
oldcwd = None
|
|
||||||
|
|
||||||
flags = d.getVarFlags(func)
|
body = data.getVar(func, d)
|
||||||
cleandirs = flags.get('cleandirs') if flags else None
|
if not body:
|
||||||
|
if body is None:
|
||||||
|
logger.warn("Function %s doesn't exist", func)
|
||||||
|
return
|
||||||
|
|
||||||
|
flags = data.getVarFlags(func, d)
|
||||||
|
cleandirs = flags.get('cleandirs')
|
||||||
if cleandirs:
|
if cleandirs:
|
||||||
for cdir in d.expand(cleandirs).split():
|
for cdir in data.expand(cleandirs, d).split():
|
||||||
bb.utils.remove(cdir, True)
|
bb.utils.remove(cdir, True)
|
||||||
bb.utils.mkdirhier(cdir)
|
|
||||||
|
|
||||||
if flags and dirs is None:
|
if dirs is None:
|
||||||
dirs = flags.get('dirs')
|
dirs = flags.get('dirs')
|
||||||
if dirs:
|
if dirs:
|
||||||
dirs = d.expand(dirs).split()
|
dirs = data.expand(dirs, d).split()
|
||||||
|
|
||||||
if dirs:
|
if dirs:
|
||||||
for adir in dirs:
|
for adir in dirs:
|
||||||
bb.utils.mkdirhier(adir)
|
bb.utils.mkdirhier(adir)
|
||||||
adir = dirs[-1]
|
adir = dirs[-1]
|
||||||
else:
|
else:
|
||||||
adir = None
|
adir = data.getVar('B', d, 1)
|
||||||
|
bb.utils.mkdirhier(adir)
|
||||||
body = d.getVar(func, False)
|
|
||||||
if not body:
|
|
||||||
if body is None:
|
|
||||||
logger.warning("Function %s doesn't exist", func)
|
|
||||||
return
|
|
||||||
|
|
||||||
ispython = flags.get('python')
|
ispython = flags.get('python')
|
||||||
|
if flags.get('fakeroot') and not flags.get('task'):
|
||||||
|
bb.fatal("Function %s specifies fakeroot but isn't a task?!" % func)
|
||||||
|
|
||||||
lockflag = flags.get('lockfiles')
|
lockflag = flags.get('lockfiles')
|
||||||
if lockflag:
|
if lockflag:
|
||||||
lockfiles = [f for f in d.expand(lockflag).split()]
|
lockfiles = [data.expand(f, d) for f in lockflag.split()]
|
||||||
else:
|
else:
|
||||||
lockfiles = None
|
lockfiles = None
|
||||||
|
|
||||||
tempdir = d.getVar('T')
|
tempdir = data.getVar('T', d, 1)
|
||||||
|
bb.utils.mkdirhier(tempdir)
|
||||||
# or func allows items to be executed outside of the normal
|
runfile = os.path.join(tempdir, 'run.{0}.{1}'.format(func, os.getpid()))
|
||||||
# task set, such as buildhistory
|
|
||||||
task = d.getVar('BB_RUNTASK') or func
|
|
||||||
if task == func:
|
|
||||||
taskfunc = task
|
|
||||||
else:
|
|
||||||
taskfunc = "%s.%s" % (task, func)
|
|
||||||
|
|
||||||
runfmt = d.getVar('BB_RUNFMT') or "run.{func}.{pid}"
|
|
||||||
runfn = runfmt.format(taskfunc=taskfunc, task=task, func=func, pid=os.getpid())
|
|
||||||
runfile = os.path.join(tempdir, runfn)
|
|
||||||
bb.utils.mkdirhier(os.path.dirname(runfile))
|
|
||||||
|
|
||||||
# Setup the courtesy link to the runfn, only for tasks
|
|
||||||
# we create the link 'just' before the run script is created
|
|
||||||
# if we create it after, and if the run script fails, then the
|
|
||||||
# link won't be created as an exception would be fired.
|
|
||||||
if task == func:
|
|
||||||
runlink = os.path.join(tempdir, 'run.{0}'.format(task))
|
|
||||||
if runlink:
|
|
||||||
bb.utils.remove(runlink)
|
|
||||||
|
|
||||||
try:
|
|
||||||
os.symlink(runfn, runlink)
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
with bb.utils.fileslocked(lockfiles):
|
with bb.utils.fileslocked(lockfiles):
|
||||||
if ispython:
|
if ispython:
|
||||||
exec_func_python(func, d, runfile, cwd=adir, pythonexception=pythonexception)
|
exec_func_python(func, d, runfile, cwd=adir)
|
||||||
else:
|
else:
|
||||||
exec_func_shell(func, d, runfile, cwd=adir)
|
exec_func_shell(func, d, runfile, cwd=adir)
|
||||||
|
|
||||||
try:
|
|
||||||
curcwd = os.getcwd()
|
|
||||||
except:
|
|
||||||
curcwd = None
|
|
||||||
|
|
||||||
if oldcwd and curcwd != oldcwd:
|
|
||||||
try:
|
|
||||||
bb.warn("Task %s changed cwd to %s" % (func, curcwd))
|
|
||||||
os.chdir(oldcwd)
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
|
|
||||||
_functionfmt = """
|
_functionfmt = """
|
||||||
|
def {function}(d):
|
||||||
|
{body}
|
||||||
|
|
||||||
{function}(d)
|
{function}(d)
|
||||||
"""
|
"""
|
||||||
logformatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
|
logformatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
|
||||||
def exec_func_python(func, d, runfile, cwd=None, pythonexception=False):
|
def exec_func_python(func, d, runfile, cwd=None):
|
||||||
"""Execute a python BB 'function'"""
|
"""Execute a python BB 'function'"""
|
||||||
|
|
||||||
code = _functionfmt.format(function=func)
|
bbfile = d.getVar('FILE', True)
|
||||||
|
code = _functionfmt.format(function=func, body=d.getVar(func, True))
|
||||||
bb.utils.mkdirhier(os.path.dirname(runfile))
|
bb.utils.mkdirhier(os.path.dirname(runfile))
|
||||||
with open(runfile, 'w') as script:
|
with open(runfile, 'w') as script:
|
||||||
bb.data.emit_func_python(func, script, d)
|
script.write(code)
|
||||||
|
|
||||||
if cwd:
|
if cwd:
|
||||||
try:
|
try:
|
||||||
olddir = os.getcwd()
|
olddir = os.getcwd()
|
||||||
except OSError as e:
|
except OSError:
|
||||||
bb.warn("%s: Cannot get cwd: %s" % (func, e))
|
|
||||||
olddir = None
|
olddir = None
|
||||||
os.chdir(cwd)
|
os.chdir(cwd)
|
||||||
|
|
||||||
bb.debug(2, "Executing python function %s" % func)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
text = "def %s(d):\n%s" % (func, d.getVar(func, False))
|
comp = utils.better_compile(code, func, bbfile)
|
||||||
fn = d.getVarFlag(func, "filename", False)
|
utils.better_exec(comp, {"d": d}, code, bbfile)
|
||||||
lineno = int(d.getVarFlag(func, "lineno", False))
|
|
||||||
bb.methodpool.insert_method(func, text, fn, lineno - 1)
|
|
||||||
|
|
||||||
comp = utils.better_compile(code, func, "exec_python_func() autogenerated")
|
|
||||||
utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated", pythonexception=pythonexception)
|
|
||||||
except (bb.parse.SkipRecipe, bb.build.FuncFailed):
|
|
||||||
raise
|
|
||||||
except:
|
except:
|
||||||
if pythonexception:
|
if sys.exc_info()[0] in (bb.parse.SkipPackage, bb.build.FuncFailed):
|
||||||
raise
|
raise
|
||||||
|
|
||||||
raise FuncFailed(func, None)
|
raise FuncFailed(func, None)
|
||||||
finally:
|
finally:
|
||||||
bb.debug(2, "Python function %s finished" % func)
|
|
||||||
|
|
||||||
if cwd and olddir:
|
if cwd and olddir:
|
||||||
try:
|
try:
|
||||||
os.chdir(olddir)
|
os.chdir(olddir)
|
||||||
except OSError as e:
|
except OSError:
|
||||||
bb.warn("%s: Cannot restore cwd %s: %s" % (func, olddir, e))
|
pass
|
||||||
|
|
||||||
def shell_trap_code():
|
def exec_func_shell(function, d, runfile, cwd=None):
|
||||||
return '''#!/bin/sh\n
|
|
||||||
# Emit a useful diagnostic if something fails:
|
|
||||||
bb_exit_handler() {
|
|
||||||
ret=$?
|
|
||||||
case $ret in
|
|
||||||
0) ;;
|
|
||||||
*) case $BASH_VERSION in
|
|
||||||
"") echo "WARNING: exit code $ret from a shell command.";;
|
|
||||||
*) echo "WARNING: ${BASH_SOURCE[0]}:${BASH_LINENO[0]} exit $ret from '$BASH_COMMAND'";;
|
|
||||||
esac
|
|
||||||
exit $ret
|
|
||||||
esac
|
|
||||||
}
|
|
||||||
trap 'bb_exit_handler' 0
|
|
||||||
set -e
|
|
||||||
'''
|
|
||||||
|
|
||||||
def exec_func_shell(func, d, runfile, cwd=None):
|
|
||||||
"""Execute a shell function from the metadata
|
"""Execute a shell function from the metadata
|
||||||
|
|
||||||
Note on directory behavior. The 'dirs' varflag should contain a list
|
Note on directory behavior. The 'dirs' varflag should contain a list
|
||||||
|
@ -349,115 +221,37 @@ def exec_func_shell(func, d, runfile, cwd=None):
|
||||||
d.delVarFlag('PWD', 'export')
|
d.delVarFlag('PWD', 'export')
|
||||||
|
|
||||||
with open(runfile, 'w') as script:
|
with open(runfile, 'w') as script:
|
||||||
script.write(shell_trap_code())
|
script.write('#!/bin/sh -e\n')
|
||||||
|
if bb.msg.loggerDefaultVerbose:
|
||||||
bb.data.emit_func(func, script, d)
|
|
||||||
|
|
||||||
if bb.msg.loggerVerboseLogs:
|
|
||||||
script.write("set -x\n")
|
script.write("set -x\n")
|
||||||
|
data.emit_func(function, script, d)
|
||||||
if cwd:
|
if cwd:
|
||||||
script.write("cd '%s'\n" % cwd)
|
script.write("cd %s\n" % cwd)
|
||||||
script.write("%s\n" % func)
|
script.write("%s\n" % function)
|
||||||
script.write('''
|
|
||||||
# cleanup
|
|
||||||
ret=$?
|
|
||||||
trap '' 0
|
|
||||||
exit $ret
|
|
||||||
''')
|
|
||||||
|
|
||||||
os.chmod(runfile, 0o775)
|
os.chmod(runfile, 0775)
|
||||||
|
|
||||||
cmd = runfile
|
cmd = runfile
|
||||||
if d.getVarFlag(func, 'fakeroot', False):
|
|
||||||
fakerootcmd = d.getVar('FAKEROOT')
|
|
||||||
if fakerootcmd:
|
|
||||||
cmd = [fakerootcmd, runfile]
|
|
||||||
|
|
||||||
if bb.msg.loggerDefaultVerbose:
|
if bb.msg.loggerDefaultVerbose:
|
||||||
logfile = LogTee(logger, sys.stdout)
|
logfile = LogTee(logger, sys.stdout)
|
||||||
else:
|
else:
|
||||||
logfile = sys.stdout
|
logfile = sys.stdout
|
||||||
|
|
||||||
progress = d.getVarFlag(func, 'progress')
|
try:
|
||||||
if progress:
|
bb.process.run(cmd, shell=False, stdin=NULL, log=logfile)
|
||||||
if progress == 'percent':
|
except bb.process.CmdError:
|
||||||
# Use default regex
|
logfn = d.getVar('BB_LOGFILE', True)
|
||||||
logfile = bb.progress.BasicProgressHandler(d, outfile=logfile)
|
raise FuncFailed(function, logfn)
|
||||||
elif progress.startswith('percent:'):
|
|
||||||
# Use specified regex
|
|
||||||
logfile = bb.progress.BasicProgressHandler(d, regex=progress.split(':', 1)[1], outfile=logfile)
|
|
||||||
elif progress.startswith('outof:'):
|
|
||||||
# Use specified regex
|
|
||||||
logfile = bb.progress.OutOfProgressHandler(d, regex=progress.split(':', 1)[1], outfile=logfile)
|
|
||||||
else:
|
|
||||||
bb.warn('%s: invalid task progress varflag value "%s", ignoring' % (func, progress))
|
|
||||||
|
|
||||||
fifobuffer = bytearray()
|
|
||||||
def readfifo(data):
|
|
||||||
nonlocal fifobuffer
|
|
||||||
fifobuffer.extend(data)
|
|
||||||
while fifobuffer:
|
|
||||||
message, token, nextmsg = fifobuffer.partition(b"\00")
|
|
||||||
if token:
|
|
||||||
splitval = message.split(b' ', 1)
|
|
||||||
cmd = splitval[0].decode("utf-8")
|
|
||||||
if len(splitval) > 1:
|
|
||||||
value = splitval[1].decode("utf-8")
|
|
||||||
else:
|
|
||||||
value = ''
|
|
||||||
if cmd == 'bbplain':
|
|
||||||
bb.plain(value)
|
|
||||||
elif cmd == 'bbnote':
|
|
||||||
bb.note(value)
|
|
||||||
elif cmd == 'bbwarn':
|
|
||||||
bb.warn(value)
|
|
||||||
elif cmd == 'bberror':
|
|
||||||
bb.error(value)
|
|
||||||
elif cmd == 'bbfatal':
|
|
||||||
# The caller will call exit themselves, so bb.error() is
|
|
||||||
# what we want here rather than bb.fatal()
|
|
||||||
bb.error(value)
|
|
||||||
elif cmd == 'bbfatal_log':
|
|
||||||
bb.error(value, forcelog=True)
|
|
||||||
elif cmd == 'bbdebug':
|
|
||||||
splitval = value.split(' ', 1)
|
|
||||||
level = int(splitval[0])
|
|
||||||
value = splitval[1]
|
|
||||||
bb.debug(level, value)
|
|
||||||
else:
|
|
||||||
bb.warn("Unrecognised command '%s' on FIFO" % cmd)
|
|
||||||
fifobuffer = nextmsg
|
|
||||||
else:
|
|
||||||
break
|
|
||||||
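readfifo() above defines a small protocol on the task FIFO: null-terminated messages whose first word names a logging command (bbplain, bbnote, bbwarn, bberror, bbfatal, bbfatal_log, bbdebug) and whose remainder is the message text. A hedged sketch of the sending side, with the FIFO path assumed (in practice the shell side of the task writes these):

def send_fifo_message(fifo_path, cmd, value):
    # Illustrative only: one message in the framing readfifo() expects.
    assert cmd in ("bbplain", "bbnote", "bbwarn", "bberror",
                   "bbfatal", "bbfatal_log", "bbdebug")
    with open(fifo_path, "wb", buffering=0) as fifo:
        fifo.write(("%s %s" % (cmd, value)).encode("utf-8") + b"\x00")

# e.g. send_fifo_message("/path/to/T/fifo.1234", "bbwarn", "disk space is low")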
|
|
||||||
tempdir = d.getVar('T')
|
|
||||||
fifopath = os.path.join(tempdir, 'fifo.%s' % os.getpid())
|
|
||||||
if os.path.exists(fifopath):
|
|
||||||
os.unlink(fifopath)
|
|
||||||
os.mkfifo(fifopath)
|
|
||||||
with open(fifopath, 'r+b', buffering=0) as fifo:
|
|
||||||
try:
|
|
||||||
bb.debug(2, "Executing shell function %s" % func)
|
|
||||||
|
|
||||||
try:
|
|
||||||
with open(os.devnull, 'r+') as stdin:
|
|
||||||
bb.process.run(cmd, shell=False, stdin=stdin, log=logfile, extrafiles=[(fifo,readfifo)])
|
|
||||||
except bb.process.CmdError:
|
|
||||||
logfn = d.getVar('BB_LOGFILE')
|
|
||||||
raise FuncFailed(func, logfn)
|
|
||||||
finally:
|
|
||||||
os.unlink(fifopath)
|
|
||||||
|
|
||||||
bb.debug(2, "Shell function %s finished" % func)
|
|
||||||
|
|
||||||
def _task_data(fn, task, d):
|
def _task_data(fn, task, d):
|
||||||
localdata = bb.data.createCopy(d)
|
localdata = data.createCopy(d)
|
||||||
localdata.setVar('BB_FILENAME', fn)
|
localdata.setVar('BB_FILENAME', fn)
|
||||||
localdata.setVar('BB_CURRENTTASK', task[3:])
|
localdata.setVar('BB_CURRENTTASK', task[3:])
|
||||||
localdata.setVar('OVERRIDES', 'task-%s:%s' %
|
localdata.setVar('OVERRIDES', 'task-%s:%s' %
|
||||||
(task[3:].replace('_', '-'), d.getVar('OVERRIDES', False)))
|
(task[3:], d.getVar('OVERRIDES', False)))
|
||||||
localdata.finalize()
|
localdata.finalize()
|
||||||
bb.data.expandKeys(localdata)
|
data.expandKeys(localdata)
|
||||||
return localdata
|
return localdata
|
||||||
|
|
||||||
def _exec_task(fn, task, d, quieterr):
|
def _exec_task(fn, task, d, quieterr):
|
||||||
|
@ -466,7 +260,7 @@ def _exec_task(fn, task, d, quieterr):
|
||||||
Execution of a task involves a bit more setup than executing a function,
|
Execution of a task involves a bit more setup than executing a function,
|
||||||
running it with its own local metadata, and with some useful variables set.
|
running it with its own local metadata, and with some useful variables set.
|
||||||
"""
|
"""
|
||||||
if not d.getVarFlag(task, 'task', False):
|
if not data.getVarFlag(task, 'task', d):
|
||||||
event.fire(TaskInvalid(task, d), d)
|
event.fire(TaskInvalid(task, d), d)
|
||||||
logger.error("No such task: %s" % task)
|
logger.error("No such task: %s" % task)
|
||||||
return 1
|
return 1
|
||||||
|
@ -474,69 +268,28 @@ def _exec_task(fn, task, d, quieterr):
|
||||||
logger.debug(1, "Executing task %s", task)
|
logger.debug(1, "Executing task %s", task)
|
||||||
|
|
||||||
localdata = _task_data(fn, task, d)
|
localdata = _task_data(fn, task, d)
|
||||||
tempdir = localdata.getVar('T')
|
tempdir = localdata.getVar('T', True)
|
||||||
if not tempdir:
|
if not tempdir:
|
||||||
bb.fatal("T variable not set, unable to build")
|
bb.fatal("T variable not set, unable to build")
|
||||||
|
|
||||||
# Change nice level if we're asked to
|
|
||||||
nice = localdata.getVar("BB_TASK_NICE_LEVEL")
|
|
||||||
if nice:
|
|
||||||
curnice = os.nice(0)
|
|
||||||
nice = int(nice) - curnice
|
|
||||||
newnice = os.nice(nice)
|
|
||||||
logger.debug(1, "Renice to %s " % newnice)
|
|
||||||
ionice = localdata.getVar("BB_TASK_IONICE_LEVEL")
|
|
||||||
if ionice:
|
|
||||||
try:
|
|
||||||
cls, prio = ionice.split(".", 1)
|
|
||||||
bb.utils.ioprio_set(os.getpid(), int(cls), int(prio))
|
|
||||||
except:
|
|
||||||
bb.warn("Invalid ionice level %s" % ionice)
|
|
||||||
|
|
||||||
bb.utils.mkdirhier(tempdir)
|
bb.utils.mkdirhier(tempdir)
|
||||||
|
|
||||||
# Determine the logfile to generate
|
|
||||||
logfmt = localdata.getVar('BB_LOGFMT') or 'log.{task}.{pid}'
|
|
||||||
logbase = logfmt.format(task=task, pid=os.getpid())
|
|
||||||
|
|
||||||
# Document the order of the tasks...
|
|
||||||
logorder = os.path.join(tempdir, 'log.task_order')
|
|
||||||
try:
|
|
||||||
with open(logorder, 'a') as logorderfile:
|
|
||||||
logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase))
|
|
||||||
except OSError:
|
|
||||||
logger.exception("Opening log file '%s'", logorder)
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Setup the courtesy link to the logfn
|
|
||||||
loglink = os.path.join(tempdir, 'log.{0}'.format(task))
|
loglink = os.path.join(tempdir, 'log.{0}'.format(task))
|
||||||
logfn = os.path.join(tempdir, logbase)
|
logfn = os.path.join(tempdir, 'log.{0}.{1}'.format(task, os.getpid()))
|
||||||
if loglink:
|
if loglink:
|
||||||
bb.utils.remove(loglink)
|
bb.utils.remove(loglink)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
os.symlink(logbase, loglink)
|
os.symlink(logfn, loglink)
|
||||||
except OSError:
|
except OSError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
prefuncs = localdata.getVarFlag(task, 'prefuncs', expand=True)
|
prefuncs = localdata.getVarFlag(task, 'prefuncs', expand=True)
|
||||||
postfuncs = localdata.getVarFlag(task, 'postfuncs', expand=True)
|
postfuncs = localdata.getVarFlag(task, 'postfuncs', expand=True)
|
||||||
|
|
||||||
class ErrorCheckHandler(logging.Handler):
|
|
||||||
def __init__(self):
|
|
||||||
self.triggered = False
|
|
||||||
logging.Handler.__init__(self, logging.ERROR)
|
|
||||||
def emit(self, record):
|
|
||||||
if getattr(record, 'forcelog', False):
|
|
||||||
self.triggered = False
|
|
||||||
else:
|
|
||||||
self.triggered = True
|
|
||||||
|
|
||||||
# Handle logfiles
|
# Handle logfiles
|
||||||
si = open('/dev/null', 'r')
|
si = file('/dev/null', 'r')
|
||||||
try:
|
try:
|
||||||
bb.utils.mkdirhier(os.path.dirname(logfn))
|
logfile = file(logfn, 'w')
|
||||||
logfile = open(logfn, 'w')
|
|
||||||
except OSError:
|
except OSError:
|
||||||
logger.exception("Opening log file '%s'", logfn)
|
logger.exception("Opening log file '%s'", logfn)
|
||||||
pass
|
pass
|
||||||
|
@ -551,48 +304,27 @@ def _exec_task(fn, task, d, quieterr):
|
||||||
os.dup2(logfile.fileno(), oso[1])
|
os.dup2(logfile.fileno(), oso[1])
|
||||||
os.dup2(logfile.fileno(), ose[1])
|
os.dup2(logfile.fileno(), ose[1])
|
||||||
|
|
||||||
# Ensure Python logging goes to the logfile
|
# Ensure python logging goes to the logfile
|
||||||
handler = logging.StreamHandler(logfile)
|
handler = logging.StreamHandler(logfile)
|
||||||
handler.setFormatter(logformatter)
|
handler.setFormatter(logformatter)
|
||||||
# Always enable full debug output into task logfiles
|
# Always enable full debug output into task logfiles
|
||||||
handler.setLevel(logging.DEBUG - 2)
|
handler.setLevel(logging.DEBUG - 2)
|
||||||
bblogger.addHandler(handler)
|
bblogger.addHandler(handler)
|
||||||
|
|
||||||
errchk = ErrorCheckHandler()
|
|
||||||
bblogger.addHandler(errchk)
|
|
||||||
|
|
||||||
localdata.setVar('BB_LOGFILE', logfn)
|
localdata.setVar('BB_LOGFILE', logfn)
|
||||||
localdata.setVar('BB_RUNTASK', task)
|
|
||||||
localdata.setVar('BB_TASK_LOGGER', bblogger)
|
|
||||||
|
|
||||||
flags = localdata.getVarFlags(task)
|
|
||||||
|
|
||||||
|
event.fire(TaskStarted(task, localdata), localdata)
|
||||||
try:
|
try:
|
||||||
try:
|
for func in (prefuncs or '').split():
|
||||||
event.fire(TaskStarted(task, logfn, flags, localdata), localdata)
|
exec_func(func, localdata)
|
||||||
except (bb.BBHandledException, SystemExit):
|
exec_func(task, localdata)
|
||||||
return 1
|
for func in (postfuncs or '').split():
|
||||||
except FuncFailed as exc:
|
exec_func(func, localdata)
|
||||||
|
except FuncFailed as exc:
|
||||||
|
if not quieterr:
|
||||||
logger.error(str(exc))
|
logger.error(str(exc))
|
||||||
return 1
|
event.fire(TaskFailed(task, logfn, localdata), localdata)
|
||||||
|
return 1
|
||||||
try:
|
|
||||||
for func in (prefuncs or '').split():
|
|
||||||
exec_func(func, localdata)
|
|
||||||
exec_func(task, localdata)
|
|
||||||
for func in (postfuncs or '').split():
|
|
||||||
exec_func(func, localdata)
|
|
||||||
except FuncFailed as exc:
|
|
||||||
if quieterr:
|
|
||||||
event.fire(TaskFailedSilent(task, logfn, localdata), localdata)
|
|
||||||
else:
|
|
||||||
errprinted = errchk.triggered
|
|
||||||
logger.error(str(exc))
|
|
||||||
event.fire(TaskFailed(task, logfn, localdata, errprinted), localdata)
|
|
||||||
return 1
|
|
||||||
except bb.BBHandledException:
|
|
||||||
event.fire(TaskFailed(task, logfn, localdata, True), localdata)
|
|
||||||
return 1
|
|
||||||
finally:
|
finally:
|
||||||
sys.stdout.flush()
|
sys.stdout.flush()
|
||||||
sys.stderr.flush()
|
sys.stderr.flush()
|
||||||
|
@ -615,44 +347,30 @@ def _exec_task(fn, task, d, quieterr):
|
||||||
logger.debug(2, "Zero size logfn %s, removing", logfn)
|
logger.debug(2, "Zero size logfn %s, removing", logfn)
|
||||||
bb.utils.remove(logfn)
|
bb.utils.remove(logfn)
|
||||||
bb.utils.remove(loglink)
|
bb.utils.remove(loglink)
|
||||||
event.fire(TaskSucceeded(task, logfn, localdata), localdata)
|
event.fire(TaskSucceeded(task, localdata), localdata)
|
||||||
|
|
||||||
if not localdata.getVarFlag(task, 'nostamp', False) and not localdata.getVarFlag(task, 'selfstamp', False):
|
if not localdata.getVarFlag(task, 'nostamp') and not localdata.getVarFlag(task, 'selfstamp'):
|
||||||
make_stamp(task, localdata)
|
make_stamp(task, localdata)
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
def exec_task(fn, task, d, profile = False):
|
def exec_task(fn, task, d):
|
||||||
try:
|
try:
|
||||||
quieterr = False
|
quieterr = False
|
||||||
if d.getVarFlag(task, "quieterrors", False) is not None:
|
if d.getVarFlag(task, "quieterrors") is not None:
|
||||||
quieterr = True
|
quieterr = True
|
||||||
|
|
||||||
if profile:
|
return _exec_task(fn, task, d, quieterr)
|
||||||
profname = "profile-%s.log" % (d.getVar("PN") + "-" + task)
|
|
||||||
try:
|
|
||||||
import cProfile as profile
|
|
||||||
except:
|
|
||||||
import profile
|
|
||||||
prof = profile.Profile()
|
|
||||||
ret = profile.Profile.runcall(prof, _exec_task, fn, task, d, quieterr)
|
|
||||||
prof.dump_stats(profname)
|
|
||||||
bb.utils.process_profilelog(profname)
|
|
||||||
|
|
||||||
return ret
|
|
||||||
else:
|
|
||||||
return _exec_task(fn, task, d, quieterr)
|
|
||||||
|
|
||||||
except Exception:
|
except Exception:
|
||||||
from traceback import format_exc
|
from traceback import format_exc
|
||||||
if not quieterr:
|
if not quieterr:
|
||||||
logger.error("Build of %s failed" % (task))
|
logger.error("Build of %s failed" % (task))
|
||||||
logger.error(format_exc())
|
logger.error(format_exc())
|
||||||
failedevent = TaskFailed(task, None, d, True)
|
failedevent = TaskFailed(task, None, d)
|
||||||
event.fire(failedevent, d)
|
event.fire(failedevent, d)
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
def stamp_internal(taskname, d, file_name, baseonly=False, noextra=False):
|
def stamp_internal(taskname, d, file_name):
|
||||||
"""
|
"""
|
||||||
Internal stamp helper function
|
Internal stamp helper function
|
||||||
Makes sure the stamp directory exists
|
Makes sure the stamp directory exists
|
||||||
|
@ -666,85 +384,40 @@ def stamp_internal(taskname, d, file_name, baseonly=False, noextra=False):
|
||||||
taskflagname = taskname.replace("_setscene", "")
|
taskflagname = taskname.replace("_setscene", "")
|
||||||
|
|
||||||
if file_name:
|
if file_name:
|
||||||
stamp = d.stamp[file_name]
|
stamp = d.stamp_base[file_name].get(taskflagname) or d.stamp[file_name]
|
||||||
extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
|
extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
|
||||||
else:
|
else:
|
||||||
stamp = d.getVar('STAMP')
|
stamp = d.getVarFlag(taskflagname, 'stamp-base', True) or d.getVar('STAMP', True)
|
||||||
file_name = d.getVar('BB_FILENAME')
|
file_name = d.getVar('BB_FILENAME', True)
|
||||||
extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or ""
|
extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""
|
||||||
|
|
||||||
if baseonly:
|
|
||||||
return stamp
|
|
||||||
if noextra:
|
|
||||||
extrainfo = ""
|
|
||||||
|
|
||||||
if not stamp:
|
if not stamp:
|
||||||
return
|
return
|
||||||
|
|
||||||
stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)
|
stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)
|
||||||
|
|
||||||
stampdir = os.path.dirname(stamp)
|
bb.utils.mkdirhier(os.path.dirname(stamp))
|
||||||
if cached_mtime_noerror(stampdir) == 0:
|
|
||||||
bb.utils.mkdirhier(stampdir)
|
|
||||||
|
|
||||||
return stamp
|
return stamp
|
||||||
|
|
||||||
def stamp_cleanmask_internal(taskname, d, file_name):
|
|
||||||
"""
|
|
||||||
Internal stamp helper function to generate stamp cleaning mask
|
|
||||||
Returns the stamp path+filename
|
|
||||||
|
|
||||||
In the bitbake core, d can be a CacheData and file_name will be set.
|
|
||||||
When called in task context, d will be a data store, file_name will not be set
|
|
||||||
"""
|
|
||||||
taskflagname = taskname
|
|
||||||
if taskname.endswith("_setscene") and taskname != "do_setscene":
|
|
||||||
taskflagname = taskname.replace("_setscene", "")
|
|
||||||
|
|
||||||
if file_name:
|
|
||||||
stamp = d.stampclean[file_name]
|
|
||||||
extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
|
|
||||||
else:
|
|
||||||
stamp = d.getVar('STAMPCLEAN')
|
|
||||||
file_name = d.getVar('BB_FILENAME')
|
|
||||||
extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or ""
|
|
||||||
|
|
||||||
if not stamp:
|
|
||||||
return []
|
|
||||||
|
|
||||||
cleanmask = bb.parse.siggen.stampcleanmask(stamp, file_name, taskname, extrainfo)
|
|
||||||
|
|
||||||
return [cleanmask, cleanmask.replace(taskflagname, taskflagname + "_setscene")]
|
|
||||||
|
|
||||||
def make_stamp(task, d, file_name = None):
|
def make_stamp(task, d, file_name = None):
|
||||||
"""
|
"""
|
||||||
Creates/updates a stamp for a given task
|
Creates/updates a stamp for a given task
|
||||||
(d can be a data dict or dataCache)
|
(d can be a data dict or dataCache)
|
||||||
"""
|
"""
|
||||||
cleanmask = stamp_cleanmask_internal(task, d, file_name)
|
|
||||||
for mask in cleanmask:
|
|
||||||
for name in glob.glob(mask):
|
|
||||||
# Preserve sigdata files in the stamps directory
|
|
||||||
if "sigdata" in name or "sigbasedata" in name:
|
|
||||||
continue
|
|
||||||
# Preserve taint files in the stamps directory
|
|
||||||
if name.endswith('.taint'):
|
|
||||||
continue
|
|
||||||
os.unlink(name)
|
|
||||||
|
|
||||||
stamp = stamp_internal(task, d, file_name)
|
stamp = stamp_internal(task, d, file_name)
|
||||||
# Remove the file and recreate to force timestamp
|
# Remove the file and recreate to force timestamp
|
||||||
# change on broken NFS filesystems
|
# change on broken NFS filesystems
|
||||||
if stamp:
|
if stamp:
|
||||||
bb.utils.remove(stamp)
|
bb.utils.remove(stamp)
|
||||||
open(stamp, "w").close()
|
f = open(stamp, "w")
|
||||||
|
f.close()
|
||||||
|
|
||||||
# If we're in task context, write out a signature file for each task
|
# If we're in task context, write out a signature file for each task
|
||||||
# as it completes
|
# as it completes
|
||||||
if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
|
if not task.endswith("_setscene") and task != "do_setscene" and not file_name:
|
||||||
stampbase = stamp_internal(task, d, None, True)
|
file_name = d.getVar('BB_FILENAME', True)
|
||||||
file_name = d.getVar('BB_FILENAME')
|
bb.parse.siggen.dump_sigtask(file_name, task, d.getVar('STAMP', True), True)
|
||||||
bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True)
|
|
||||||
|
|
||||||
def del_stamp(task, d, file_name = None):
|
def del_stamp(task, d, file_name = None):
|
||||||
"""
|
"""
|
||||||
|
@ -754,33 +427,15 @@ def del_stamp(task, d, file_name = None):
|
||||||
stamp = stamp_internal(task, d, file_name)
|
stamp = stamp_internal(task, d, file_name)
|
||||||
bb.utils.remove(stamp)
|
bb.utils.remove(stamp)
|
||||||
|
|
||||||
def write_taint(task, d, file_name = None):
    """
    Creates a "taint" file which will force the specified task and its
    dependents to be re-run the next time by influencing the value of its
    taskhash.
    (d can be a data dict or dataCache)
    """
    import uuid
    if file_name:
        taintfn = d.stamp[file_name] + '.' + task + '.taint'
    else:
        taintfn = d.getVar('STAMP') + '.' + task + '.taint'
    bb.utils.mkdirhier(os.path.dirname(taintfn))
    # The specific content of the taint file is not really important,
    # we just need it to be random, so a random UUID is used
    with open(taintfn, 'w') as taintf:
        taintf.write(str(uuid.uuid4()))

def stampfile(taskname, d, file_name = None, noextra=False):
    """
    Return the stamp for a given task
    (d can be a data dict or dataCache)
    """
    return stamp_internal(taskname, d, file_name, noextra=noextra)

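# Illustrative sketch (not part of the change above): how the stamp/taint helpers
# are typically combined. It assumes this module is importable as bb.build and
# that `d` is an already-populated recipe datastore -- both are assumptions.
def _force_rerun_sketch(d):
    from bb import build
    build.write_taint('do_compile', d)           # ${STAMP}.do_compile.taint -> taskhash changes
    stampfn = build.stampfile('do_compile', d)   # stamp path that make_stamp()/del_stamp() act on
    build.del_stamp('do_compile', d)             # drop the stamp so the task is considered out of date
    return stampfn
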
def add_tasks(tasklist, d):
    task_deps = d.getVar('_task_deps', False)
    if not task_deps:
        task_deps = {}
    if not 'tasks' in task_deps:
@@ -789,119 +444,38 @@ def add_tasks(tasklist, d):
        task_deps['parents'] = {}

    for task in tasklist:
        task = d.expand(task)

        d.setVarFlag(task, 'task', 1)

        if not task in task_deps['tasks']:
            task_deps['tasks'].append(task)

        flags = d.getVarFlags(task)
        def getTask(name):
            if not name in task_deps:
                task_deps[name] = {}
            if name in flags:
                deptask = d.expand(flags[name])
                task_deps[name][task] = deptask
        getTask('depends')
        getTask('rdepends')
        getTask('deptask')
        getTask('rdeptask')
        getTask('recrdeptask')
        getTask('recideptask')
        getTask('nostamp')
        getTask('fakeroot')
        getTask('noexec')
        getTask('umask')
        task_deps['parents'][task] = []
        if 'deps' in flags:
            for dep in flags['deps']:
                dep = d.expand(dep)
                task_deps['parents'][task].append(dep)

    # don't assume holding a reference
    d.setVar('_task_deps', task_deps)

def addtask(task, before, after, d):
    if task[:3] != "do_":
        task = "do_" + task

    d.setVarFlag(task, "task", 1)
    bbtasks = d.getVar('__BBTASKS', False) or []
    if task not in bbtasks:
        bbtasks.append(task)
    d.setVar('__BBTASKS', bbtasks)

    existing = d.getVarFlag(task, "deps", False) or []
    if after is not None:
        # set up deps for function
        for entry in after.split():
            if entry not in existing:
                existing.append(entry)
    d.setVarFlag(task, "deps", existing)
    if before is not None:
        # set up things that depend on this func
        for entry in before.split():
            existing = d.getVarFlag(entry, "deps", False) or []
            if task not in existing:
                d.setVarFlag(entry, "deps", [task] + existing)

def deltask(task, d):
    if task[:3] != "do_":
        task = "do_" + task

    bbtasks = d.getVar('__BBTASKS', False) or []
    if task in bbtasks:
        bbtasks.remove(task)
        d.delVarFlag(task, 'task')
        d.setVar('__BBTASKS', bbtasks)

    d.delVarFlag(task, 'deps')
    for bbtask in d.getVar('__BBTASKS', False) or []:
        deps = d.getVarFlag(bbtask, 'deps', False) or []
        if task in deps:
            deps.remove(task)
            d.setVarFlag(bbtask, 'deps', deps)

def preceedtask(task, with_recrdeptasks, d):
    """
    Returns a set of tasks in the current recipe which were specified as
    precondition by the task itself ("after") or which listed themselves
    as precondition ("before"). Preceding tasks specified via the
    "recrdeptask" are included in the result only if requested. Beware
    that this may lead to the task itself being listed.
    """
    preceed = set()
    preceed.update(d.getVarFlag(task, 'deps') or [])
    if with_recrdeptasks:
        recrdeptask = d.getVarFlag(task, 'recrdeptask')
        if recrdeptask:
            preceed.update(recrdeptask.split())
    return preceed

def tasksbetween(task_start, task_end, d):
    """
    Return the list of tasks between two tasks in the current recipe,
    where task_start is the task to start at and task_end is the task to
    end at (and task_end has a dependency chain back to task_start).
    """
    outtasks = []
    tasks = list(filter(lambda k: d.getVarFlag(k, "task"), d.keys()))
    def follow_chain(task, endtask, chain=None):
        if not chain:
            chain = []
        chain.append(task)
        for othertask in tasks:
            if othertask == task:
                continue
            if task == endtask:
                for ctask in chain:
                    if ctask not in outtasks:
                        outtasks.append(ctask)
            else:
                deps = d.getVarFlag(othertask, 'deps', False)
                if task in deps:
                    follow_chain(othertask, endtask, chain)
        chain.pop()
    follow_chain(task_start, task_end)
    return outtasks

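# Illustrative sketch (hypothetical, not part of the change): driving the
# task-registration helpers above directly. `d` is assumed to be a populated
# recipe datastore and the task names are invented.
def _task_registration_sketch(d):
    addtask('do_mytask', 'do_build', 'do_compile', d)   # run after do_compile, before do_build
    deps = preceedtask('do_mytask', False, d)            # -> {'do_compile'} if no other deps flags are set
    deltask('do_mytask', d)                               # unregister it again
    return deps
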
@@ -1,12 +1,11 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Cache implementation
#
# Caching of bitbake variables before task execution

# Copyright (C) 2006        Richard Purdie
# Copyright (C) 2012        Intel Corporation

# but small sections based on code from bin/bitbake:
# Copyright (C) 2003, 2004  Chris Larson

@@ -28,19 +27,26 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import sys
import logging
import pickle
from collections import defaultdict
import bb.utils

logger = logging.getLogger("BitBake.Cache")

__cache_version__ = "151"

def getCacheFile(path, filename, data_hash):
    return os.path.join(path, filename + "." + data_hash)

# RecipeInfoCommon defines common data retrieving methods
# from meta data for caches. CoreRecipeInfo as well as other
@@ -70,17 +76,13 @@ class RecipeInfoCommon(object):
                    for task in tasks)

    @classmethod
    def flaglist(cls, flag, varlist, metadata, squash=False):
        out_dict = dict((var, metadata.getVarFlag(var, flag))
                    for var in varlist)
        if squash:
            return dict((k, v) for (k, v) in out_dict.items() if v)
        else:
            return out_dict

    @classmethod
    def getvar(cls, var, metadata, expand = True):
        return metadata.getVar(var, expand) or ''

class CoreRecipeInfo(RecipeInfoCommon):
@@ -93,7 +95,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
        self.timestamp = bb.parse.cached_mtime(filename)
        self.variants = self.listvar('__VARIANTS', metadata) + ['']
        self.appends = self.listvar('__BBAPPEND', metadata)
        self.nocache = self.getvar('BB_DONT_CACHE', metadata)

        self.skipreason = self.getvar('__SKIPPED', metadata)
        if self.skipreason:
@@ -113,6 +115,7 @@ class CoreRecipeInfo(RecipeInfoCommon):
        self.basetaskhashes = self.taskvar('BB_BASEHASH', self.tasks, metadata)
        self.hashfilename = self.getvar('BB_HASHFILENAME', metadata)

        self.file_depends = metadata.getVar('__depends', False)
        self.task_deps = metadata.getVar('_task_deps', False) or {'tasks': [], 'parents': {}}

        self.skipped = False
@@ -120,11 +123,11 @@ class CoreRecipeInfo(RecipeInfoCommon):
        self.pv = self.getvar('PV', metadata)
        self.pr = self.getvar('PR', metadata)
        self.defaultpref = self.intvar('DEFAULT_PREFERENCE', metadata)
        self.not_world = self.getvar('EXCLUDE_FROM_WORLD', metadata)
        self.stamp = self.getvar('STAMP', metadata)
        self.stampclean = self.getvar('STAMPCLEAN', metadata)
        self.stamp_extrainfo = self.flaglist('stamp-extra-info', self.tasks, metadata)
        self.file_checksums = self.flaglist('file-checksums', self.tasks, metadata, True)
        self.packages_dynamic = self.listvar('PACKAGES_DYNAMIC', metadata)
        self.depends = self.depvar('DEPENDS', metadata)
        self.provides = self.depvar('PROVIDES', metadata)
@@ -134,11 +137,12 @@ class CoreRecipeInfo(RecipeInfoCommon):
        self.rprovides_pkg = self.pkgvar('RPROVIDES', self.packages, metadata)
        self.rdepends_pkg = self.pkgvar('RDEPENDS', self.packages, metadata)
        self.rrecommends_pkg = self.pkgvar('RRECOMMENDS', self.packages, metadata)
        self.inherits = self.getvar('__inherit_cache', metadata, expand=False)
        self.fakerootenv = self.getvar('FAKEROOTENV', metadata)
        self.fakerootdirs = self.getvar('FAKEROOTDIRS', metadata)
        self.fakerootnoenv = self.getvar('FAKEROOTNOENV', metadata)
        self.extradepsfunc = self.getvar('calculate_extra_depends', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
@@ -150,9 +154,8 @@ class CoreRecipeInfo(RecipeInfoCommon):
        cachedata.pkg_dp = {}

        cachedata.stamp = {}
        cachedata.stampclean = {}
        cachedata.stamp_extrainfo = {}
        cachedata.file_checksums = {}
        cachedata.fn_provides = {}
        cachedata.pn_provides = defaultdict(list)
        cachedata.all_depends = []
@@ -171,10 +174,11 @@ class CoreRecipeInfo(RecipeInfoCommon):

        cachedata.basetaskhash = {}
        cachedata.inherits = {}
        cachedata.fakerootenv = {}
        cachedata.fakerootnoenv = {}
        cachedata.fakerootdirs = {}
        cachedata.extradepsfunc = {}

    def add_cacheData(self, cachedata, fn):
        cachedata.task_deps[fn] = self.task_deps
@@ -183,9 +187,8 @@ class CoreRecipeInfo(RecipeInfoCommon):
        cachedata.pkg_pepvpr[fn] = (self.pe, self.pv, self.pr)
        cachedata.pkg_dp[fn] = self.defaultpref
        cachedata.stamp[fn] = self.stamp
        cachedata.stampclean[fn] = self.stampclean
        cachedata.stamp_extrainfo[fn] = self.stamp_extrainfo
        cachedata.file_checksums[fn] = self.file_checksums

        provides = [self.pn]
        for provide in self.provides:
@@ -210,22 +213,19 @@ class CoreRecipeInfo(RecipeInfoCommon):
            rprovides += self.rprovides_pkg[package]

        for rprovide in rprovides:
            if fn not in cachedata.rproviders[rprovide]:
                cachedata.rproviders[rprovide].append(fn)

        for package in self.packages_dynamic:
            cachedata.packages_dynamic[package].append(fn)

        # Build hash of runtime depends and recommends
        for package in self.packages + [self.pn]:
            cachedata.rundeps[fn][package] = list(self.rdepends) + self.rdepends_pkg[package]
            cachedata.runrecs[fn][package] = list(self.rrecommends) + self.rrecommends_pkg[package]

        # Collect files we may need for possible world-dep
        # calculations
        if self.not_world:
            logger.debug(1, "EXCLUDE FROM WORLD: %s", fn)
        else:
            cachedata.possible_world.append(fn)

        # create a collection of all targets for sanity checking
@@ -234,157 +234,36 @@ class CoreRecipeInfo(RecipeInfoCommon):
            cachedata.universe_target.append(self.pn)

        cachedata.hashfn[fn] = self.hashfilename
        for task, taskhash in self.basetaskhashes.items():
            identifier = '%s.%s' % (fn, task)
            cachedata.basetaskhash[identifier] = taskhash

        cachedata.inherits[fn] = self.inherits
        cachedata.fakerootenv[fn] = self.fakerootenv
        cachedata.fakerootnoenv[fn] = self.fakerootnoenv
        cachedata.fakerootdirs[fn] = self.fakerootdirs
        cachedata.extradepsfunc[fn] = self.extradepsfunc

def virtualfn2realfn(virtualfn):
    """
    Convert a virtual file name to a real one + the associated subclass keyword
    """
    mc = ""
    if virtualfn.startswith('multiconfig:'):
        elems = virtualfn.split(':')
        mc = elems[1]
        virtualfn = ":".join(elems[2:])

    fn = virtualfn
    cls = ""
    if virtualfn.startswith('virtual:'):
        elems = virtualfn.split(':')
        cls = ":".join(elems[1:-1])
        fn = elems[-1]

    return (fn, cls, mc)

def realfn2virtual(realfn, cls, mc):
    """
    Convert a real filename + the associated subclass keyword to a virtual filename
    """
    if cls:
        realfn = "virtual:" + cls + ":" + realfn
    if mc:
        realfn = "multiconfig:" + mc + ":" + realfn
    return realfn

def variant2virtual(realfn, variant):
    """
    Convert a real filename + the associated subclass keyword to a virtual filename
    """
    if variant == "":
        return realfn
    if variant.startswith("multiconfig:"):
        elems = variant.split(":")
        if elems[2]:
            return "multiconfig:" + elems[1] + ":virtual:" + ":".join(elems[2:]) + ":" + realfn
        return "multiconfig:" + elems[1] + ":" + realfn
    return "virtual:" + variant + ":" + realfn

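# Illustrative round trips for the helpers above; the filenames are hypothetical
# but the expected results follow directly from the code.
assert virtualfn2realfn("virtual:native:/path/foo.bb") == ("/path/foo.bb", "native", "")
assert virtualfn2realfn("multiconfig:musl:/path/foo.bb") == ("/path/foo.bb", "", "musl")
assert realfn2virtual("/path/foo.bb", "native", "musl") == "multiconfig:musl:virtual:native:/path/foo.bb"
assert variant2virtual("/path/foo.bb", "native") == "virtual:native:/path/foo.bb"
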
def parse_recipe(bb_data, bbfile, appends, mc=''):
    """
    Parse a recipe
    """

    chdir_back = False

    bb_data.setVar("__BBMULTICONFIG", mc)

    # expand tmpdir to include this topdir
    bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "")
    bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
    oldpath = os.path.abspath(os.getcwd())
    bb.parse.cached_mtime_noerror(bbfile_loc)

    # The ConfHandler first looks if there is a TOPDIR and if not
    # then it would call getcwd().
    # Previously, we chdir()ed to bbfile_loc, called the handler
    # and finally chdir()ed back, a couple of thousand times. We now
    # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
    if not bb_data.getVar('TOPDIR', False):
        chdir_back = True
        bb_data.setVar('TOPDIR', bbfile_loc)
    try:
        if appends:
            bb_data.setVar('__BBAPPEND', " ".join(appends))
        bb_data = bb.parse.handle(bbfile, bb_data)
        if chdir_back:
            os.chdir(oldpath)
        return bb_data
    except:
        if chdir_back:
            os.chdir(oldpath)
        raise

class NoCache(object):

    def __init__(self, databuilder):
        self.databuilder = databuilder
        self.data = databuilder.data

    def loadDataFull(self, virtualfn, appends):
        """
        Return a complete set of data for fn.
        To do this, we need to parse the file.
        """
        logger.debug(1, "Parsing %s (full)" % virtualfn)
        (fn, virtual, mc) = virtualfn2realfn(virtualfn)
        bb_data = self.load_bbfile(virtualfn, appends, virtonly=True)
        return bb_data[virtual]

    def load_bbfile(self, bbfile, appends, virtonly = False):
        """
        Load and parse one .bb build file
        Return the data and whether parsing resulted in the file being skipped
        """

        if virtonly:
            (bbfile, virtual, mc) = virtualfn2realfn(bbfile)
            bb_data = self.databuilder.mcdata[mc].createCopy()
            bb_data.setVar("__ONLYFINALISE", virtual or "default")
            datastores = parse_recipe(bb_data, bbfile, appends, mc)
            return datastores

        bb_data = self.data.createCopy()
        datastores = parse_recipe(bb_data, bbfile, appends)

        for mc in self.databuilder.mcdata:
            if not mc:
                continue
            bb_data = self.databuilder.mcdata[mc].createCopy()
            newstores = parse_recipe(bb_data, bbfile, appends, mc)
            for ns in newstores:
                datastores["multiconfig:%s:%s" % (mc, ns)] = newstores[ns]

        return datastores

class Cache(NoCache):
    """
    BitBake Cache implementation
    """

    def __init__(self, databuilder, data_hash, caches_array):
        super().__init__(databuilder)
        data = databuilder.data

        # Pass caches_array information into Cache Constructor
        # It will be used later for deciding whether we
        # need extra cache file dump/load support
        self.caches_array = caches_array
        self.cachedir = data.getVar("CACHE")
        self.clean = set()
        self.checked = set()
        self.depends_cache = {}
        self.data_fn = None
        self.cacheclean = True
        self.data_hash = data_hash

        if self.cachedir in [None, '']:
            self.has_cache = False
@@ -393,86 +272,89 @@ class Cache(NoCache):
            return

        self.has_cache = True
        self.cachefile = getCacheFile(self.cachedir, "bb_cache.dat", self.data_hash)

        logger.debug(1, "Using cache in '%s'", self.cachedir)
        bb.utils.mkdirhier(self.cachedir)

        cache_ok = True
        if self.caches_array:
            for cache_class in self.caches_array:
                cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
                cache_ok = cache_ok and os.path.exists(cachefile)
                cache_class.init_cacheData(self)
        if cache_ok:
            self.load_cachefile()
        elif os.path.isfile(self.cachefile):
            logger.info("Out of date cache found, rebuilding...")

    def load_cachefile(self):
        cachesize = 0
        previous_progress = 0
        previous_percent = 0

        # Calculate the correct cachesize of all those cache files
        for cache_class in self.caches_array:
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "rb") as cachefile:
                cachesize += os.fstat(cachefile.fileno()).st_size

        bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)

        for cache_class in self.caches_array:
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                # Check cache version information
                try:
                    cache_ver = pickled.load()
                    bitbake_ver = pickled.load()
                except Exception:
                    logger.info('Invalid cache, rebuilding...')
                    return

                if cache_ver != __cache_version__:
                    logger.info('Cache version mismatch, rebuilding...')
                    return
                elif bitbake_ver != bb.__version__:
                    logger.info('Bitbake version mismatch, rebuilding...')
                    return

                # Load the rest of the cache file
                current_progress = 0
                while cachefile:
                    try:
                        key = pickled.load()
                        value = pickled.load()
                    except Exception:
                        break
                    if not isinstance(key, str):
                        bb.warn("%s from extras cache is not a string?" % key)
                        break
                    if not isinstance(value, RecipeInfoCommon):
                        bb.warn("%s from extras cache is not a RecipeInfoCommon class?" % value)
                        break

                    if key in self.depends_cache:
                        self.depends_cache[key].append(value)
                    else:
                        self.depends_cache[key] = [value]
                    # only fire events on even percentage boundaries
                    current_progress = cachefile.tell() + previous_progress
                    if current_progress > cachesize:
                        # we might have calculated incorrect total size because a file
                        # might've been written out just after we checked its size
                        cachesize = current_progress
                    current_percent = 100 * current_progress / cachesize
                    if current_percent > previous_percent:
                        previous_percent = current_percent
                        bb.event.fire(bb.event.CacheLoadProgress(current_progress, cachesize),
                                      self.data)

            previous_progress += current_progress

        # Note: the depends cache count corresponds to the number of parsed files;
        # the same file can have several caches but is still regarded as one item in the cache
@@ -480,33 +362,69 @@ class Cache(NoCache):
                                           len(self.depends_cache)),
                      self.data)

    def parse(self, filename, appends):
        """Parse the specified filename, returning the recipe information"""
        logger.debug(1, "Parsing %s", filename)
        infos = []
        datastores = self.load_bbfile(filename, appends)
        depends = []
        variants = []
        # Process the "real" fn last so we can store variants list
        for variant, data in sorted(datastores.items(),
                                    key=lambda i: i[0],
                                    reverse=True):
            virtualfn = variant2virtual(filename, variant)
            variants.append(variant)
            depends = depends + (data.getVar("__depends", False) or [])
            if depends and not variant:
                data.setVar("__depends", depends)
            if virtualfn == filename:
                data.setVar("__VARIANTS", " ".join(variants))
            info_array = []
            for cache_class in self.caches_array:
                info = cache_class(filename, data)
                info_array.append(info)
            infos.append((virtualfn, info_array))

        return infos

    def load(self, filename, appends):
        """Obtain the recipe information for the specified filename,
        using cached values if available, otherwise parsing.

@@ -520,20 +438,21 @@ class Cache(NoCache):
            # info_array item is a list of [CoreRecipeInfo, XXXRecipeInfo]
            info_array = self.depends_cache[filename]
            for variant in info_array[0].variants:
                virtualfn = variant2virtual(filename, variant)
                infos.append((virtualfn, self.depends_cache[virtualfn]))
        else:
            return self.parse(filename, appends, configdata, self.caches_array)

        return cached, infos

    def loadData(self, fn, appends, cacheData):
        """Load the recipe info for the specified filename,
        parsing and adding to the cache if necessary, and adding
        the recipe information to the supplied CacheData instance."""
        skipped, virtuals = 0, 0

        cached, infos = self.load(fn, appends)
        for virtualfn, info_array in infos:
            if info_array[0].skipped:
                logger.debug(1, "Skipping %s: %s", virtualfn, info_array[0].skipreason)

@@ -608,52 +527,24 @@ class Cache(NoCache):
                self.remove(fn)
                return False

        if hasattr(info_array[0], 'file_checksums'):
            for _, fl in info_array[0].file_checksums.items():
                fl = fl.strip()
                while fl:
                    # A .split() would be simpler but means spaces or colons in filenames would break
                    a = fl.find(":True")
                    b = fl.find(":False")
                    if ((a < 0) and b) or ((b > 0) and (b < a)):
                        f = fl[:b+6]
                        fl = fl[b+7:]
                    elif ((b < 0) and a) or ((a > 0) and (a < b)):
                        f = fl[:a+5]
                        fl = fl[a+6:]
                    else:
                        break
                    fl = fl.strip()
                    if "*" in f:
                        continue
                    f, exist = f.split(":")
                    if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
                        logger.debug(2, "Cache: %s's file checksum list file %s changed",
                                     fn, f)
                        self.remove(fn)
                        return False

        if appends != info_array[0].appends:
            logger.debug(2, "Cache: appends for %s changed", fn)
            logger.debug(2, "%s to %s" % (str(appends), str(info_array[0].appends)))
            self.remove(fn)
            return False

        invalid = False
        for cls in info_array[0].variants:
            virtualfn = variant2virtual(fn, cls)
            self.clean.add(virtualfn)
            if virtualfn not in self.depends_cache:
                logger.debug(2, "Cache: %s is not cached", virtualfn)
                invalid = True
            elif len(self.depends_cache[virtualfn]) != len(self.caches_array):
                logger.debug(2, "Cache: Extra caches missing for %s?" % virtualfn)
                invalid = True

        # If any one of the variants is not present, mark as invalid for all
        if invalid:
            for cls in info_array[0].variants:
                virtualfn = variant2virtual(fn, cls)
                if virtualfn in self.clean:
                    logger.debug(2, "Cache: Removing %s from cache", virtualfn)
                    self.clean.remove(virtualfn)

@@ -690,19 +581,30 @@ class Cache(NoCache):
            logger.debug(2, "Cache is clean, not saving.")
            return

        for cache_class in self.caches_array:
            cache_class_name = cache_class.__name__
            cachefile = getCacheFile(self.cachedir, cache_class.cachefile, self.data_hash)
            with open(cachefile, "wb") as f:
                p = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
                p.dump(__cache_version__)
                p.dump(bb.__version__)

                for key, info_array in self.depends_cache.items():
                    for info in info_array:
                        if isinstance(info, RecipeInfoCommon) and info.__class__.__name__ == cache_class_name:
                            p.dump(key)
                            p.dump(info)

        del self.depends_cache

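# Standalone sketch (not part of the change) of the on-disk layout that the
# save path above produces and load_cachefile() consumes: a plain pickle stream
# of the cache version, the bitbake version, then repeated key/info pairs. The
# file name, version string and payload below are invented placeholders.
import pickle

with open("bb_cache.dat.0123abcd", "wb") as _f:       # real name is <cachefile>.<data_hash>
    _p = pickle.Pickler(_f, pickle.HIGHEST_PROTOCOL)
    _p.dump(__cache_version__)                         # e.g. "151"
    _p.dump("1.xx.y")                                  # placeholder for bb.__version__
    _p.dump("/path/foo.bb")                            # key ...
    _p.dump({"pn": "foo"})                             # ... info (a RecipeInfoCommon instance in reality)
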
@@ -710,13 +612,10 @@ class Cache(NoCache):
    def mtime(cachefile):
        return bb.parse.cached_mtime_noerror(cachefile)

    def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
        if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
            cacheData.add_from_recipeinfo(filename, info_array)

            if watcher:
                watcher(info_array[0].file_depends)

        if not self.has_cache:
            return

@@ -730,13 +629,50 @@ class Cache(NoCache):
        Save data we need into the cache
        """

        realfn = virtualfn2realfn(file_name)[0]

        info_array = []
        for cache_class in self.caches_array:
            info_array.append(cache_class(realfn, data))
        self.add_info(file_name, info_array, cacheData, parsed)

def init(cooker):
    """
@@ -750,12 +686,12 @@ def init(cooker):

    * Its mtime
    * The mtimes of all its dependencies
    * Whether it caused a parse.SkipRecipe exception

    Files causing parsing errors are evicted from the cache.

    """
    return Cache(cooker.configuration.data, cooker.configuration.data_hash)

class CacheData(object):
@@ -766,9 +702,8 @@ class CacheData(object):
    def __init__(self, caches_array):
        self.caches_array = caches_array
        for cache_class in self.caches_array:
            if not issubclass(cache_class, RecipeInfoCommon):
                bb.error("Extra cache data class %s should subclass RecipeInfoCommon class" % cache_class)
            cache_class.init_cacheData(self)

        # Direct cache variables
        self.task_queues = {}
@@ -783,107 +718,4 @@ class CacheData(object):
        for info in info_array:
            info.add_cacheData(self, fn)

class MultiProcessCache(object):
    """
    BitBake multi-process cache implementation

    Used by the codeparser & file checksum caches
    """

    def __init__(self):
        self.cachefile = None
        self.cachedata = self.create_cachedata()
        self.cachedata_extras = self.create_cachedata()

    def init_cache(self, d, cache_file_name=None):
        cachedir = (d.getVar("PERSISTENT_DIR") or
                    d.getVar("CACHE"))
        if cachedir in [None, '']:
            return
        bb.utils.mkdirhier(cachedir)
        self.cachefile = os.path.join(cachedir,
                                      cache_file_name or self.__class__.cache_file_name)
        logger.debug(1, "Using cache in '%s'", self.cachefile)

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        try:
            with open(self.cachefile, "rb") as f:
                p = pickle.Unpickler(f)
                data, version = p.load()
        except:
            bb.utils.unlockfile(glf)
            return

        bb.utils.unlockfile(glf)

        if version != self.__class__.CACHE_VERSION:
            return

        self.cachedata = data

    def create_cachedata(self):
        data = [{}]
        return data

    def save_extras(self):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock", shared=True)

        i = os.getpid()
        lf = None
        while not lf:
            lf = bb.utils.lockfile(self.cachefile + ".lock." + str(i), retry=False)
            if not lf or os.path.exists(self.cachefile + "-" + str(i)):
                if lf:
                    bb.utils.unlockfile(lf)
                    lf = None
                i = i + 1
                continue

            with open(self.cachefile + "-" + str(i), "wb") as f:
                p = pickle.Pickler(f, -1)
                p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(lf)
        bb.utils.unlockfile(glf)

    def merge_data(self, source, dest):
        for j in range(0, len(dest)):
            for h in source[j]:
                if h not in dest[j]:
                    dest[j][h] = source[j][h]

    def save_merge(self):
        if not self.cachefile:
            return

        glf = bb.utils.lockfile(self.cachefile + ".lock")

        data = self.cachedata

        for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
            f = os.path.join(os.path.dirname(self.cachefile), f)
            try:
                with open(f, "rb") as fd:
                    p = pickle.Unpickler(fd)
                    extradata, version = p.load()
            except (IOError, EOFError):
                os.unlink(f)
                continue

            if version != self.__class__.CACHE_VERSION:
                os.unlink(f)
                continue

            self.merge_data(extradata, data)
            os.unlink(f)

        with open(self.cachefile, "wb") as f:
            p = pickle.Pickler(f, -1)
            p.dump([data, self.__class__.CACHE_VERSION])

        bb.utils.unlockfile(glf)

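# Hypothetical MultiProcessCache subclass (illustration only, not part of the
# change), mirroring how the base class above is meant to be used: per-process
# additions go into cachedata_extras and are written out later by
# save_extras()/save_merge().
class _ExampleCache(MultiProcessCache):
    cache_file_name = "example_cache.dat"   # invented name
    CACHE_VERSION = 1

    def lookup(self, key, compute):
        entry = self.cachedata[0].get(key)
        if entry is not None:
            return entry
        value = compute(key)
        self.cachedata_extras[0][key] = value
        return value
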
@@ -35,22 +35,11 @@ class HobRecipeInfo(RecipeInfoCommon):
    # such as (bb_cache.dat, bb_extracache_hob.dat)
    cachefile = "bb_extracache_" + classname + ".dat"

    # override this member with the list of extra cache fields
    # that this class will provide
    cachefields = ['summary', 'license', 'section',
                   'description', 'homepage', 'bugtracker',
                   'prevision', 'files_info']

    def __init__(self, filename, metadata):

        self.summary = self.getvar('SUMMARY', metadata)
        self.license = self.getvar('LICENSE', metadata)
        self.section = self.getvar('SECTION', metadata)
        self.description = self.getvar('DESCRIPTION', metadata)
        self.homepage = self.getvar('HOMEPAGE', metadata)
        self.bugtracker = self.getvar('BUGTRACKER', metadata)
        self.prevision = self.getvar('PR', metadata)
        self.files_info = self.getvar('FILES_INFO', metadata)

    @classmethod
    def init_cacheData(cls, cachedata):
@@ -58,18 +47,8 @@ class HobRecipeInfo(RecipeInfoCommon):
        cachedata.summary = {}
        cachedata.license = {}
        cachedata.section = {}
        cachedata.description = {}
        cachedata.homepage = {}
        cachedata.bugtracker = {}
        cachedata.prevision = {}
        cachedata.files_info = {}

    def add_cacheData(self, cachedata, fn):
        cachedata.summary[fn] = self.summary
        cachedata.license[fn] = self.license
        cachedata.section[fn] = self.section
        cachedata.description[fn] = self.description
        cachedata.homepage[fn] = self.homepage
        cachedata.bugtracker[fn] = self.bugtracker
        cachedata.prevision[fn] = self.prevision
        cachedata.files_info[fn] = self.files_info

@ -1,134 +0,0 @@
|
||||||
# Local file checksum cache implementation
|
|
||||||
#
|
|
||||||
# Copyright (C) 2012 Intel Corporation
|
|
||||||
#
|
|
||||||
# This program is free software; you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU General Public License version 2 as
|
|
||||||
# published by the Free Software Foundation.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import glob
import operator
import os
import stat
import pickle
import bb.utils
import logging
from bb.cache import MultiProcessCache

logger = logging.getLogger("BitBake.Cache")

# mtime cache (non-persistent)
# based upon the assumption that files do not change during bitbake run
class FileMtimeCache(object):
    cache = {}

    def cached_mtime(self, f):
        if f not in self.cache:
            self.cache[f] = os.stat(f)[stat.ST_MTIME]
        return self.cache[f]

    def cached_mtime_noerror(self, f):
        if f not in self.cache:
            try:
                self.cache[f] = os.stat(f)[stat.ST_MTIME]
            except OSError:
                return 0
        return self.cache[f]

    def update_mtime(self, f):
        self.cache[f] = os.stat(f)[stat.ST_MTIME]
        return self.cache[f]

    def clear(self):
        self.cache.clear()

# Checksum + mtime cache (persistent)
class FileChecksumCache(MultiProcessCache):
    cache_file_name = "local_file_checksum_cache.dat"
    CACHE_VERSION = 1

    def __init__(self):
        self.mtime_cache = FileMtimeCache()
        MultiProcessCache.__init__(self)

    def get_checksum(self, f):
        entry = self.cachedata[0].get(f)
        cmtime = self.mtime_cache.cached_mtime(f)
        if entry:
            (mtime, hashval) = entry
            if cmtime == mtime:
                return hashval
            else:
                bb.debug(2, "file %s changed mtime, recompute checksum" % f)

        hashval = bb.utils.md5_file(f)
        self.cachedata_extras[0][f] = (cmtime, hashval)
        return hashval

    def merge_data(self, source, dest):
        for h in source[0]:
            if h in dest:
                (smtime, _) = source[0][h]
                (dmtime, _) = dest[0][h]
                if smtime > dmtime:
                    dest[0][h] = source[0][h]
            else:
                dest[0][h] = source[0][h]

    def get_checksums(self, filelist, pn):
        """Get checksums for a list of files"""

        def checksum_file(f):
            try:
                checksum = self.get_checksum(f)
            except OSError as e:
                bb.warn("Unable to get checksum for %s SRC_URI entry %s: %s" % (pn, os.path.basename(f), e))
                return None
            return checksum

        def checksum_dir(pth):
            # Handle directories recursively
            dirchecksums = []
            for root, dirs, files in os.walk(pth):
                for name in files:
                    fullpth = os.path.join(root, name)
                    checksum = checksum_file(fullpth)
                    if checksum:
                        dirchecksums.append((fullpth, checksum))
            return dirchecksums

        checksums = []
        for pth in filelist.split():
            exist = pth.split(":")[1]
            if exist == "False":
                continue
            pth = pth.split(":")[0]
            if '*' in pth:
                # Handle globs
                for f in glob.glob(pth):
                    if os.path.isdir(f):
                        if not os.path.islink(f):
                            checksums.extend(checksum_dir(f))
                    else:
                        checksum = checksum_file(f)
                        if checksum:
                            checksums.append((f, checksum))
            elif os.path.isdir(pth):
                if not os.path.islink(pth):
                    checksums.extend(checksum_dir(pth))
            else:
                checksum = checksum_file(pth)
                if checksum:
                    checksums.append((pth, checksum))

        checksums.sort(key=operator.itemgetter(1))
        return checksums
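The two classes above pair a non-persistent mtime lookup with a persistent checksum store: a file is only re-hashed when its recorded mtime no longer matches what os.stat() reports. A minimal standalone sketch of that idea follows; it is not BitBake's API, and the class and method names are invented for illustration (no persistence, no multi-process merging).

import hashlib
import os

class MtimeChecksumCache:
    """Re-hash a file only when its mtime differs from the cached one."""
    def __init__(self):
        self.entries = {}   # path -> (mtime, md5 hex digest)

    def checksum(self, path):
        mtime = os.stat(path).st_mtime
        cached = self.entries.get(path)
        if cached and cached[0] == mtime:
            return cached[1]            # mtime unchanged, reuse the stored hash
        with open(path, "rb") as f:
            digest = hashlib.md5(f.read()).hexdigest()
        self.entries[path] = (mtime, digest)
        return digest

# usage sketch:
# cache = MtimeChecksumCache()
# print(cache.checksum("/etc/hostname"))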
@@ -1,39 +1,21 @@
"""
BitBake code parser

Parses actual code (i.e. python and shell) for functions and in-line
expressions. Used mainly to determine dependencies on other functions
and variables within the BitBake metadata. Also provides a cache for
this information in order to speed up processing.

(Not to be confused with the code that parses the metadata itself,
see lib/bb/parse/ for that).

NOTE: if you change how the parsers gather information you will almost
certainly need to increment CodeParserCache.CACHE_VERSION below so that
any existing codeparser cache gets invalidated. Additionally you'll need
to increment __cache_version__ in cache.py in order to ensure that old
recipe caches don't trigger "Taskhash mismatch" errors.

"""
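As the docstring says, this module walks the Python AST of a metadata fragment and records which variables it reads so that BitBake can compute task dependencies. A rough sketch of the underlying technique, using only the standard ast module, is shown below; the helper is illustrative only and is much simpler than the PythonParser defined later in this file (it also uses the current ast.Constant node, whereas the code in this file predates it and checks ast.Str).

import ast

def find_getvar_references(code):
    """Return variable names passed as string literals to *.getVar(...) calls."""
    refs = set()
    tree = ast.parse(code)
    for node in ast.walk(tree):
        if isinstance(node, ast.Call) and isinstance(node.func, ast.Attribute):
            if node.func.attr == "getVar" and node.args:
                arg = node.args[0]
                if isinstance(arg, ast.Constant) and isinstance(arg.value, str):
                    refs.add(arg.value)
    return refs

print(find_getvar_references('mydir = d.getVar("WORKDIR")'))   # {'WORKDIR'}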
import ast
|
import ast
|
||||||
import sys
|
|
||||||
import codegen
|
import codegen
|
||||||
import logging
|
import logging
|
||||||
import pickle
|
|
||||||
import bb.pysh as pysh
|
|
||||||
import os.path
|
import os.path
|
||||||
import bb.utils, bb.data
|
import bb.utils, bb.data
|
||||||
import hashlib
|
|
||||||
from itertools import chain
|
from itertools import chain
|
||||||
from bb.pysh import pyshyacc, pyshlex, sherrors
|
from pysh import pyshyacc, pyshlex, sherrors
|
||||||
from bb.cache import MultiProcessCache
|
|
||||||
|
|
||||||
logger = logging.getLogger('BitBake.CodeParser')
|
logger = logging.getLogger('BitBake.CodeParser')
|
||||||
|
PARSERCACHE_VERSION = 2
|
||||||
|
|
||||||
|
try:
|
||||||
|
import cPickle as pickle
|
||||||
|
except ImportError:
|
||||||
|
import pickle
|
||||||
|
logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
|
||||||
|
|
||||||
def bbhash(s):
|
|
||||||
return hashlib.md5(s.encode("utf-8")).hexdigest()
|
|
||||||
|
|
||||||
def check_indent(codestr):
|
def check_indent(codestr):
|
||||||
"""If the code is indented, add a top level piece of code to 'remove' the indentation"""
|
"""If the code is indented, add a top level piece of code to 'remove' the indentation"""
|
||||||
|
@ -46,155 +28,127 @@ def check_indent(codestr):
|
||||||
return codestr
|
return codestr
|
||||||
|
|
||||||
if codestr[i-1] == "\t" or codestr[i-1] == " ":
|
if codestr[i-1] == "\t" or codestr[i-1] == " ":
|
||||||
if codestr[0] == "\n":
|
|
||||||
# Since we're adding a line, we need to remove one line of any empty padding
|
|
||||||
# to ensure line numbers are correct
|
|
||||||
codestr = codestr[1:]
|
|
||||||
return "if 1:\n" + codestr
|
return "if 1:\n" + codestr
|
||||||
|
|
||||||
return codestr
|
return codestr
|
||||||
|
|
||||||
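check_indent() above exists because function bodies lifted out of the metadata arrive already indented, which compile() rejects at module level; prefixing the fragment with a dummy "if 1:" block makes the indentation legal without changing what the code does. A small illustration of the same trick, with a made-up fragment:

import ast

fragment = '    d.setVar("A", "1")\n    d.setVar("B", "2")\n'

# compile(fragment, ...) would raise "IndentationError: unexpected indent"
wrapped = "if 1:\n" + fragment
tree = compile(wrapped, "<string>", "exec", ast.PyCF_ONLY_AST)
print(type(tree))   # <class 'ast.Module'>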
|
pythonparsecache = {}
|
||||||
|
shellparsecache = {}
|
||||||
|
|
||||||
# Basically pickle, in python 2.7.3 at least, does badly with data duplication
|
def parser_cachefile(d):
|
||||||
# upon pickling and unpickling. Combine this with duplicate objects and things
|
cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or
|
||||||
# are a mess.
|
bb.data.getVar("CACHE", d, True))
|
||||||
#
|
if cachedir in [None, '']:
|
||||||
# When the sets are originally created, python calls intern() on the set keys
|
return None
|
||||||
# which significantly improves memory usage. Sadly the pickle/unpickle process
|
bb.utils.mkdirhier(cachedir)
|
||||||
# doesn't call intern() on the keys and results in the same strings being duplicated
|
cachefile = os.path.join(cachedir, "bb_codeparser.dat")
|
||||||
# in memory. This also means pickle will save the same string multiple times in
|
logger.debug(1, "Using cache in '%s' for codeparser cache", cachefile)
|
||||||
# the cache file.
|
return cachefile
|
||||||
#
# By having shell and python cacheline objects with setstate/getstate, we force
# the object creation through our own routine where we can call intern (via internSet).
#
# We also use hashable frozensets and ensure we use references to these so that
# duplicates can be removed, both in memory and in the resulting pickled data.
#
# By playing these games, the size of the cache file shrinks dramatically
# meaning faster load times and the reloaded cache files also consume much less
# memory. Smaller cache files, faster load times and lower memory usage is good.
#
# A custom getstate/setstate using tuples is actually worth 15% cachesize by
# avoiding duplication of the attribute names!

class SetCache(object):
    def __init__(self):
        self.setcache = {}

    def internSet(self, items):

        new = []
        for i in items:
            new.append(sys.intern(i))
        s = frozenset(new)
        h = hash(s)
        if h in self.setcache:
            return self.setcache[h]
        self.setcache[h] = s
        return s

codecache = SetCache()

class pythonCacheLine(object):
    def __init__(self, refs, execs, contains):
        self.refs = codecache.internSet(refs)
        self.execs = codecache.internSet(execs)
        self.contains = {}
        for c in contains:
            self.contains[c] = codecache.internSet(contains[c])

    def __getstate__(self):
        return (self.refs, self.execs, self.contains)

    def __setstate__(self, state):
        (refs, execs, contains) = state
        self.__init__(refs, execs, contains)
    def __hash__(self):
        l = (hash(self.refs), hash(self.execs))
        for c in sorted(self.contains.keys()):
            l = l + (c, hash(self.contains[c]))
        return hash(l)
    def __repr__(self):
        return " ".join([str(self.refs), str(self.execs), str(self.contains)])


class shellCacheLine(object):
    def __init__(self, execs):
        self.execs = codecache.internSet(execs)

    def __getstate__(self):
        return (self.execs)

    def __setstate__(self, state):
        (execs) = state
        self.__init__(execs)
    def __hash__(self):
        return hash(self.execs)
    def __repr__(self):
        return str(self.execs)

class CodeParserCache(MultiProcessCache):
    cache_file_name = "bb_codeparser.dat"
    # NOTE: you must increment this if you change how the parsers gather information,
    # so that an existing cache gets invalidated. Additionally you'll need
    # to increment __cache_version__ in cache.py in order to ensure that old
    # recipe caches don't trigger "Taskhash mismatch" errors.
    CACHE_VERSION = 9

    def __init__(self):
        MultiProcessCache.__init__(self)
        self.pythoncache = self.cachedata[0]
        self.shellcache = self.cachedata[1]
        self.pythoncacheextras = self.cachedata_extras[0]
        self.shellcacheextras = self.cachedata_extras[1]

        # To avoid duplication in the codeparser cache, keep
        # a lookup of hashes of objects we already have
        self.pythoncachelines = {}
        self.shellcachelines = {}

    def newPythonCacheLine(self, refs, execs, contains):
        cacheline = pythonCacheLine(refs, execs, contains)
        h = hash(cacheline)
        if h in self.pythoncachelines:
            return self.pythoncachelines[h]
        self.pythoncachelines[h] = cacheline
        return cacheline

    def newShellCacheLine(self, execs):
        cacheline = shellCacheLine(execs)
        h = hash(cacheline)
        if h in self.shellcachelines:
            return self.shellcachelines[h]
        self.shellcachelines[h] = cacheline
        return cacheline

    def init_cache(self, d):
        # Check if we already have the caches
        if self.pythoncache:
            return

        MultiProcessCache.init_cache(self, d)

        # cachedata gets re-assigned in the parent
        self.pythoncache = self.cachedata[0]
        self.shellcache = self.cachedata[1]

    def create_cachedata(self):
        data = [{}, {}]
        return data

codeparsercache = CodeParserCache()
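The comment block and SetCache above are about deduplication: by interning strings and handing out one frozenset object per distinct set of names, both the in-memory footprint and the pickled cache shrink, because pickle writes a back-reference to an object it has already serialized instead of a second full copy. A small sketch of that effect, illustrative only:

import pickle
import sys

names = ["WORKDIR", "DEPENDS", "SRC_URI"]

# Without sharing: two equal but distinct frozensets are pickled in full twice.
separate = [frozenset(list(names)), frozenset(list(names))]

# With sharing: intern the strings and reuse a single frozenset object,
# so the second occurrence pickles as a memo reference.
shared_set = frozenset(sys.intern(n) for n in names)
shared = [shared_set, shared_set]

print(len(pickle.dumps(separate)) > len(pickle.dumps(shared)))   # True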
def parser_cache_init(d):
|
def parser_cache_init(d):
|
||||||
codeparsercache.init_cache(d)
|
global pythonparsecache
|
||||||
|
global shellparsecache
|
||||||
|
|
||||||
def parser_cache_save():
|
cachefile = parser_cachefile(d)
|
||||||
codeparsercache.save_extras()
|
if not cachefile:
|
||||||
|
return
|
||||||
|
|
||||||
|
try:
|
||||||
|
p = pickle.Unpickler(file(cachefile, "rb"))
|
||||||
|
data, version = p.load()
|
||||||
|
except:
|
||||||
|
return
|
||||||
|
|
||||||
|
if version != PARSERCACHE_VERSION:
|
||||||
|
return
|
||||||
|
|
||||||
|
pythonparsecache = data[0]
|
||||||
|
shellparsecache = data[1]
|
||||||
|
|
||||||
|
def parser_cache_save(d):
|
||||||
|
cachefile = parser_cachefile(d)
|
||||||
|
if not cachefile:
|
||||||
|
return
|
||||||
|
|
||||||
|
glf = bb.utils.lockfile(cachefile + ".lock", shared=True)
|
||||||
|
|
||||||
|
i = os.getpid()
|
||||||
|
lf = None
|
||||||
|
while not lf:
|
||||||
|
shellcache = {}
|
||||||
|
pythoncache = {}
|
||||||
|
|
||||||
|
lf = bb.utils.lockfile(cachefile + ".lock." + str(i), retry=False)
|
||||||
|
if not lf or os.path.exists(cachefile + "-" + str(i)):
|
||||||
|
if lf:
|
||||||
|
bb.utils.unlockfile(lf)
|
||||||
|
lf = None
|
||||||
|
i = i + 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
try:
|
||||||
|
p = pickle.Unpickler(file(cachefile, "rb"))
|
||||||
|
data, version = p.load()
|
||||||
|
except (IOError, EOFError, ValueError):
|
||||||
|
data, version = None, None
|
||||||
|
|
||||||
|
if version != PARSERCACHE_VERSION:
|
||||||
|
shellcache = shellparsecache
|
||||||
|
pythoncache = pythonparsecache
|
||||||
|
else:
|
||||||
|
for h in pythonparsecache:
|
||||||
|
if h not in data[0]:
|
||||||
|
pythoncache[h] = pythonparsecache[h]
|
||||||
|
for h in shellparsecache:
|
||||||
|
if h not in data[1]:
|
||||||
|
shellcache[h] = shellparsecache[h]
|
||||||
|
|
||||||
|
p = pickle.Pickler(file(cachefile + "-" + str(i), "wb"), -1)
|
||||||
|
p.dump([[pythoncache, shellcache], PARSERCACHE_VERSION])
|
||||||
|
|
||||||
|
bb.utils.unlockfile(lf)
|
||||||
|
bb.utils.unlockfile(glf)
|
||||||
|
|
||||||
|
def parser_cache_savemerge(d):
|
||||||
|
cachefile = parser_cachefile(d)
|
||||||
|
if not cachefile:
|
||||||
|
return
|
||||||
|
|
||||||
|
glf = bb.utils.lockfile(cachefile + ".lock")
|
||||||
|
|
||||||
|
try:
|
||||||
|
p = pickle.Unpickler(file(cachefile, "rb"))
|
||||||
|
data, version = p.load()
|
||||||
|
except (IOError, EOFError):
|
||||||
|
data, version = None, None
|
||||||
|
|
||||||
|
if version != PARSERCACHE_VERSION:
|
||||||
|
data = [{}, {}]
|
||||||
|
|
||||||
|
for f in [y for y in os.listdir(os.path.dirname(cachefile)) if y.startswith(os.path.basename(cachefile) + '-')]:
|
||||||
|
f = os.path.join(os.path.dirname(cachefile), f)
|
||||||
|
try:
|
||||||
|
p = pickle.Unpickler(file(f, "rb"))
|
||||||
|
extradata, version = p.load()
|
||||||
|
except (IOError, EOFError):
|
||||||
|
extradata, version = [{}, {}], None
|
||||||
|
|
||||||
|
if version != PARSERCACHE_VERSION:
|
||||||
|
continue
|
||||||
|
|
||||||
|
for h in extradata[0]:
|
||||||
|
if h not in data[0]:
|
||||||
|
data[0][h] = extradata[0][h]
|
||||||
|
for h in extradata[1]:
|
||||||
|
if h not in data[1]:
|
||||||
|
data[1][h] = extradata[1][h]
|
||||||
|
os.unlink(f)
|
||||||
|
|
||||||
|
p = pickle.Pickler(file(cachefile, "wb"), -1)
|
||||||
|
p.dump([data, PARSERCACHE_VERSION])
|
||||||
|
|
||||||
|
bb.utils.unlockfile(glf)
|
||||||
|
|
||||||
def parser_cache_savemerge():
|
|
||||||
codeparsercache.save_merge()
|
|
||||||
|
|
||||||
Logger = logging.getLoggerClass()
|
Logger = logging.getLoggerClass()
|
||||||
class BufferedLogger(Logger):
|
class BufferedLogger(Logger):
|
||||||
|
@ -209,15 +163,11 @@ class BufferedLogger(Logger):
|
||||||
|
|
||||||
def flush(self):
|
def flush(self):
|
||||||
for record in self.buffer:
|
for record in self.buffer:
|
||||||
if self.target.isEnabledFor(record.levelno):
|
self.target.handle(record)
|
||||||
self.target.handle(record)
|
|
||||||
self.buffer = []
|
self.buffer = []
|
||||||
|
|
||||||
class PythonParser():
|
class PythonParser():
|
||||||
getvars = (".getVar", ".appendVar", ".prependVar")
|
getvars = ("d.getVar", "bb.data.getVar", "data.getVar")
|
||||||
getvarflags = (".getVarFlag", ".appendVarFlag", ".prependVarFlag")
|
|
||||||
containsfuncs = ("bb.utils.contains", "base_contains")
|
|
||||||
containsanyfuncs = ("bb.utils.contains_any", "bb.utils.filter")
|
|
||||||
execfuncs = ("bb.build.exec_func", "bb.build.exec_task")
|
execfuncs = ("bb.build.exec_func", "bb.build.exec_task")
|
||||||
|
|
||||||
def warn(self, func, arg):
|
def warn(self, func, arg):
|
||||||
|
@ -236,37 +186,11 @@ class PythonParser():
|
||||||
|
|
||||||
def visit_Call(self, node):
|
def visit_Call(self, node):
|
||||||
name = self.called_node_name(node.func)
|
name = self.called_node_name(node.func)
|
||||||
if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs):
|
if name in self.getvars:
|
||||||
if isinstance(node.args[0], ast.Str):
|
if isinstance(node.args[0], ast.Str):
|
||||||
varname = node.args[0].s
|
self.var_references.add(node.args[0].s)
|
||||||
if name in self.containsfuncs and isinstance(node.args[1], ast.Str):
|
|
||||||
if varname not in self.contains:
|
|
||||||
self.contains[varname] = set()
|
|
||||||
self.contains[varname].add(node.args[1].s)
|
|
||||||
elif name in self.containsanyfuncs and isinstance(node.args[1], ast.Str):
|
|
||||||
if varname not in self.contains:
|
|
||||||
self.contains[varname] = set()
|
|
||||||
self.contains[varname].update(node.args[1].s.split())
|
|
||||||
elif name.endswith(self.getvarflags):
|
|
||||||
if isinstance(node.args[1], ast.Str):
|
|
||||||
self.references.add('%s[%s]' % (varname, node.args[1].s))
|
|
||||||
else:
|
|
||||||
self.warn(node.func, node.args[1])
|
|
||||||
else:
|
|
||||||
self.references.add(varname)
|
|
||||||
else:
|
else:
|
||||||
self.warn(node.func, node.args[0])
|
self.warn(node.func, node.args[0])
|
||||||
elif name and name.endswith(".expand"):
|
|
||||||
if isinstance(node.args[0], ast.Str):
|
|
||||||
value = node.args[0].s
|
|
||||||
d = bb.data.init()
|
|
||||||
parser = d.expandWithRefs(value, self.name)
|
|
||||||
self.references |= parser.references
|
|
||||||
self.execs |= parser.execs
|
|
||||||
for varname in parser.contains:
|
|
||||||
if varname not in self.contains:
|
|
||||||
self.contains[varname] = set()
|
|
||||||
self.contains[varname] |= parser.contains[varname]
|
|
||||||
elif name in self.execfuncs:
|
elif name in self.execfuncs:
|
||||||
if isinstance(node.args[0], ast.Str):
|
if isinstance(node.args[0], ast.Str):
|
||||||
self.var_execs.add(node.args[0].s)
|
self.var_execs.add(node.args[0].s)
|
||||||
|
@ -289,50 +213,36 @@ class PythonParser():
|
||||||
break
|
break
|
||||||
|
|
||||||
def __init__(self, name, log):
|
def __init__(self, name, log):
|
||||||
self.name = name
|
self.var_references = set()
|
||||||
self.var_execs = set()
|
self.var_execs = set()
|
||||||
self.contains = {}
|
|
||||||
self.execs = set()
|
self.execs = set()
|
||||||
self.references = set()
|
self.references = set()
|
||||||
self.log = BufferedLogger('BitBake.Data.PythonParser', logging.DEBUG, log)
|
self.log = BufferedLogger('BitBake.Data.%s' % name, logging.DEBUG, log)
|
||||||
|
|
||||||
self.unhandled_message = "in call of %s, argument '%s' is not a string literal"
|
self.unhandled_message = "in call of %s, argument '%s' is not a string literal"
|
||||||
self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message)
|
self.unhandled_message = "while parsing %s, %s" % (name, self.unhandled_message)
|
||||||
|
|
||||||
def parse_python(self, node, lineno=0, filename="<string>"):
|
def parse_python(self, node):
|
||||||
if not node or not node.strip():
|
h = hash(str(node))
|
||||||
|
|
||||||
|
if h in pythonparsecache:
|
||||||
|
self.references = pythonparsecache[h]["refs"]
|
||||||
|
self.execs = pythonparsecache[h]["execs"]
|
||||||
return
|
return
|
||||||
|
|
||||||
h = bbhash(str(node))
|
code = compile(check_indent(str(node)), "<string>", "exec",
|
||||||
|
|
||||||
if h in codeparsercache.pythoncache:
|
|
||||||
self.references = set(codeparsercache.pythoncache[h].refs)
|
|
||||||
self.execs = set(codeparsercache.pythoncache[h].execs)
|
|
||||||
self.contains = {}
|
|
||||||
for i in codeparsercache.pythoncache[h].contains:
|
|
||||||
self.contains[i] = set(codeparsercache.pythoncache[h].contains[i])
|
|
||||||
return
|
|
||||||
|
|
||||||
if h in codeparsercache.pythoncacheextras:
|
|
||||||
self.references = set(codeparsercache.pythoncacheextras[h].refs)
|
|
||||||
self.execs = set(codeparsercache.pythoncacheextras[h].execs)
|
|
||||||
self.contains = {}
|
|
||||||
for i in codeparsercache.pythoncacheextras[h].contains:
|
|
||||||
self.contains[i] = set(codeparsercache.pythoncacheextras[h].contains[i])
|
|
||||||
return
|
|
||||||
|
|
||||||
# We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
|
|
||||||
node = "\n" * int(lineno) + node
|
|
||||||
code = compile(check_indent(str(node)), filename, "exec",
|
|
||||||
ast.PyCF_ONLY_AST)
|
ast.PyCF_ONLY_AST)
|
||||||
|
|
||||||
for n in ast.walk(code):
|
for n in ast.walk(code):
|
||||||
if n.__class__.__name__ == "Call":
|
if n.__class__.__name__ == "Call":
|
||||||
self.visit_Call(n)
|
self.visit_Call(n)
|
||||||
|
|
||||||
self.execs.update(self.var_execs)
|
self.references.update(self.var_references)
|
||||||
|
self.references.update(self.var_execs)
|
||||||
|
|
||||||
codeparsercache.pythoncacheextras[h] = codeparsercache.newPythonCacheLine(self.references, self.execs, self.contains)
|
pythonparsecache[h] = {}
|
||||||
|
pythonparsecache[h]["refs"] = self.references
|
||||||
|
pythonparsecache[h]["execs"] = self.execs
|
||||||
|
|
||||||
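parse_python() above pads the fragment with blank lines before compiling it, because compile() has no argument meaning "this string really starts at line N of its file"; prepending newlines keeps the line numbers recorded in the AST (and in any SyntaxError) aligned with the original recipe. A short illustration of the idea, with hypothetical values:

import ast

body = 'd.setVar("A", "1")\n'
lineno = 41     # hypothetical: number of lines preceding the fragment in the recipe

padded = "\n" * lineno + body
tree = compile(padded, "example.bb", "exec", ast.PyCF_ONLY_AST)
stmt = tree.body[0]
print(stmt.lineno)   # 42 -> matches the statement's position in the original file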
class ShellParser():
|
class ShellParser():
|
||||||
def __init__(self, name, log):
|
def __init__(self, name, log):
|
||||||
|
@ -348,30 +258,25 @@ class ShellParser():
|
||||||
commands it executes.
|
commands it executes.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
h = bbhash(str(value))
|
h = hash(str(value))
|
||||||
|
|
||||||
if h in codeparsercache.shellcache:
|
if h in shellparsecache:
|
||||||
self.execs = set(codeparsercache.shellcache[h].execs)
|
self.execs = shellparsecache[h]["execs"]
|
||||||
return self.execs
|
return self.execs
|
||||||
|
|
||||||
if h in codeparsercache.shellcacheextras:
|
|
||||||
self.execs = set(codeparsercache.shellcacheextras[h].execs)
|
|
||||||
return self.execs
|
|
||||||
|
|
||||||
self._parse_shell(value)
|
|
||||||
self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)
|
|
||||||
|
|
||||||
codeparsercache.shellcacheextras[h] = codeparsercache.newShellCacheLine(self.execs)
|
|
||||||
|
|
||||||
return self.execs
|
|
||||||
|
|
||||||
def _parse_shell(self, value):
|
|
||||||
try:
|
try:
|
||||||
tokens, _ = pyshyacc.parse(value, eof=True, debug=False)
|
tokens, _ = pyshyacc.parse(value, eof=True, debug=False)
|
||||||
except pyshlex.NeedMore:
|
except pyshlex.NeedMore:
|
||||||
raise sherrors.ShellSyntaxError("Unexpected EOF")
|
raise sherrors.ShellSyntaxError("Unexpected EOF")
|
||||||
|
|
||||||
self.process_tokens(tokens)
|
for token in tokens:
|
||||||
|
self.process_tokens(token)
|
||||||
|
self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)
|
||||||
|
|
||||||
|
shellparsecache[h] = {}
|
||||||
|
shellparsecache[h]["execs"] = self.execs
|
||||||
|
|
||||||
|
return self.execs
|
||||||
|
|
||||||
def process_tokens(self, tokens):
|
def process_tokens(self, tokens):
|
||||||
"""Process a supplied portion of the syntax tree as returned by
|
"""Process a supplied portion of the syntax tree as returned by
|
||||||
|
@ -417,24 +322,18 @@ class ShellParser():
|
||||||
"case_clause": case_clause,
|
"case_clause": case_clause,
|
||||||
}
|
}
|
||||||
|
|
||||||
def process_token_list(tokens):
|
for token in tokens:
|
||||||
for token in tokens:
|
name, value = token
|
||||||
if isinstance(token, list):
|
try:
|
||||||
process_token_list(token)
|
more_tokens, words = token_handlers[name](value)
|
||||||
continue
|
except KeyError:
|
||||||
name, value = token
|
raise NotImplementedError("Unsupported token type " + name)
|
||||||
try:
|
|
||||||
more_tokens, words = token_handlers[name](value)
|
|
||||||
except KeyError:
|
|
||||||
raise NotImplementedError("Unsupported token type " + name)
|
|
||||||
|
|
||||||
if more_tokens:
|
if more_tokens:
|
||||||
self.process_tokens(more_tokens)
|
self.process_tokens(more_tokens)
|
||||||
|
|
||||||
if words:
|
if words:
|
||||||
self.process_words(words)
|
self.process_words(words)
|
||||||
|
|
||||||
process_token_list(tokens)
|
|
||||||
|
|
||||||
def process_words(self, words):
|
def process_words(self, words):
|
||||||
"""Process a set of 'words' in pyshyacc parlance, which includes
|
"""Process a set of 'words' in pyshyacc parlance, which includes
|
||||||
|
@ -451,7 +350,7 @@ class ShellParser():
|
||||||
|
|
||||||
if part[0] in ('`', '$('):
|
if part[0] in ('`', '$('):
|
||||||
command = pyshlex.wordtree_as_string(part[1:-1])
|
command = pyshlex.wordtree_as_string(part[1:-1])
|
||||||
self._parse_shell(command)
|
self.parse_shell(command)
|
||||||
|
|
||||||
if word[0] in ("cmd_name", "cmd_word"):
|
if word[0] in ("cmd_name", "cmd_word"):
|
||||||
if word in words:
|
if word in words:
|
||||||
|
@ -470,7 +369,7 @@ class ShellParser():
|
||||||
self.log.debug(1, self.unhandled_template % cmd)
|
self.log.debug(1, self.unhandled_template % cmd)
|
||||||
elif cmd == "eval":
|
elif cmd == "eval":
|
||||||
command = " ".join(word for _, word in words[1:])
|
command = " ".join(word for _, word in words[1:])
|
||||||
self._parse_shell(command)
|
self.parse_shell(command)
|
||||||
else:
|
else:
|
||||||
self.allexecs.add(cmd)
|
self.allexecs.add(cmd)
|
||||||
break
|
break
|
||||||
|
|
|
@ -28,15 +28,13 @@ and must not trigger events, directly or indirectly.
|
||||||
Commands are queued in a CommandQueue
|
Commands are queued in a CommandQueue
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from collections import OrderedDict, defaultdict
|
|
||||||
|
|
||||||
import bb.event
|
import bb.event
|
||||||
import bb.cooker
|
import bb.cooker
|
||||||
import bb.remotedata
|
import bb.data
|
||||||
|
|
||||||
|
async_cmds = {}
|
||||||
|
sync_cmds = {}
|
||||||
|
|
||||||
class DataStoreConnectionHandle(object):
|
|
||||||
def __init__(self, dsindex=0):
|
|
||||||
self.dsindex = dsindex
|
|
||||||
|
|
||||||
class CommandCompleted(bb.event.Event):
|
class CommandCompleted(bb.event.Event):
|
||||||
pass
|
pass
|
||||||
|
@ -51,9 +49,6 @@ class CommandFailed(CommandExit):
|
||||||
self.error = message
|
self.error = message
|
||||||
CommandExit.__init__(self, 1)
|
CommandExit.__init__(self, 1)
|
||||||
|
|
||||||
class CommandError(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class Command:
|
class Command:
|
||||||
"""
|
"""
|
||||||
A queue of asynchronous commands for bitbake
|
A queue of asynchronous commands for bitbake
|
||||||
|
@ -62,50 +57,45 @@ class Command:
|
||||||
self.cooker = cooker
|
self.cooker = cooker
|
||||||
self.cmds_sync = CommandsSync()
|
self.cmds_sync = CommandsSync()
|
||||||
self.cmds_async = CommandsAsync()
|
self.cmds_async = CommandsAsync()
|
||||||
self.remotedatastores = bb.remotedata.RemoteDatastores(cooker)
|
|
||||||
|
|
||||||
# FIXME Add lock for this
|
# FIXME Add lock for this
|
||||||
self.currentAsyncCommand = None
|
self.currentAsyncCommand = None
|
||||||
|
|
||||||
def runCommand(self, commandline, ro_only = False):
|
for attr in CommandsSync.__dict__:
|
||||||
command = commandline.pop(0)
|
command = attr[:].lower()
|
||||||
if hasattr(CommandsSync, command):
|
method = getattr(CommandsSync, attr)
|
||||||
# Can run synchronous commands straight away
|
sync_cmds[command] = (method)
|
||||||
command_method = getattr(self.cmds_sync, command)
|
|
||||||
if ro_only:
|
for attr in CommandsAsync.__dict__:
|
||||||
if not hasattr(command_method, 'readonly') or False == getattr(command_method, 'readonly'):
|
command = attr[:].lower()
|
||||||
return None, "Not able to execute not readonly commands in readonly mode"
|
method = getattr(CommandsAsync, attr)
|
||||||
try:
|
async_cmds[command] = (method)
|
||||||
if getattr(command_method, 'needconfig', False):
|
|
||||||
self.cooker.updateCacheSync()
|
def runCommand(self, commandline):
|
||||||
result = command_method(self, commandline)
|
try:
|
||||||
except CommandError as exc:
|
command = commandline.pop(0)
|
||||||
return None, exc.args[0]
|
if command in CommandsSync.__dict__:
|
||||||
except (Exception, SystemExit):
|
# Can run synchronous commands straight away
|
||||||
import traceback
|
return getattr(CommandsSync, command)(self.cmds_sync, self, commandline)
|
||||||
return None, traceback.format_exc()
|
if self.currentAsyncCommand is not None:
|
||||||
else:
|
return "Busy (%s in progress)" % self.currentAsyncCommand[0]
|
||||||
return result, None
|
if command not in CommandsAsync.__dict__:
|
||||||
if self.currentAsyncCommand is not None:
|
return "No such command"
|
||||||
return None, "Busy (%s in progress)" % self.currentAsyncCommand[0]
|
self.currentAsyncCommand = (command, commandline)
|
||||||
if command not in CommandsAsync.__dict__:
|
self.cooker.server_registration_cb(self.cooker.runCommands, self.cooker)
|
||||||
return None, "No such command"
|
return True
|
||||||
self.currentAsyncCommand = (command, commandline)
|
except:
|
||||||
self.cooker.configuration.server_register_idlecallback(self.cooker.runCommands, self.cooker)
|
import traceback
|
||||||
return True, None
|
return traceback.format_exc()
|
||||||
|
|
||||||
def runAsyncCommand(self):
|
def runAsyncCommand(self):
|
||||||
try:
|
try:
|
||||||
if self.cooker.state in (bb.cooker.state.error, bb.cooker.state.shutdown, bb.cooker.state.forceshutdown):
|
|
||||||
# updateCache will trigger a shutdown of the parser
|
|
||||||
# and then raise BBHandledException triggering an exit
|
|
||||||
self.cooker.updateCache()
|
|
||||||
return False
|
|
||||||
if self.currentAsyncCommand is not None:
|
if self.currentAsyncCommand is not None:
|
||||||
(command, options) = self.currentAsyncCommand
|
(command, options) = self.currentAsyncCommand
|
||||||
commandmethod = getattr(CommandsAsync, command)
|
commandmethod = getattr(CommandsAsync, command)
|
||||||
needcache = getattr( commandmethod, "needcache" )
|
needcache = getattr( commandmethod, "needcache" )
|
||||||
if needcache and self.cooker.state != bb.cooker.state.running:
|
if (needcache and self.cooker.state in
|
||||||
|
(bb.cooker.state.initial, bb.cooker.state.parsing)):
|
||||||
self.cooker.updateCache()
|
self.cooker.updateCache()
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
|
@ -118,34 +108,25 @@ class Command:
|
||||||
return False
|
return False
|
||||||
except SystemExit as exc:
|
except SystemExit as exc:
|
||||||
arg = exc.args[0]
|
arg = exc.args[0]
|
||||||
if isinstance(arg, str):
|
if isinstance(arg, basestring):
|
||||||
self.finishAsyncCommand(arg)
|
self.finishAsyncCommand(arg)
|
||||||
else:
|
else:
|
||||||
self.finishAsyncCommand("Exited with %s" % arg)
|
self.finishAsyncCommand("Exited with %s" % arg)
|
||||||
return False
|
return False
|
||||||
except Exception as exc:
|
except Exception:
|
||||||
import traceback
|
import traceback
|
||||||
if isinstance(exc, bb.BBHandledException):
|
self.finishAsyncCommand(traceback.format_exc())
|
||||||
self.finishAsyncCommand("")
|
|
||||||
else:
|
|
||||||
self.finishAsyncCommand(traceback.format_exc())
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def finishAsyncCommand(self, msg=None, code=None):
|
def finishAsyncCommand(self, msg=None, code=None):
|
||||||
if msg or msg == "":
|
if msg:
|
||||||
bb.event.fire(CommandFailed(msg), self.cooker.data)
|
bb.event.fire(CommandFailed(msg), self.cooker.configuration.event_data)
|
||||||
elif code:
|
elif code:
|
||||||
bb.event.fire(CommandExit(code), self.cooker.data)
|
bb.event.fire(CommandExit(code), self.cooker.configuration.event_data)
|
||||||
else:
|
else:
|
||||||
bb.event.fire(CommandCompleted(), self.cooker.data)
|
bb.event.fire(CommandCompleted(), self.cooker.configuration.event_data)
|
||||||
self.currentAsyncCommand = None
|
self.currentAsyncCommand = None
|
||||||
self.cooker.finishcommand()
|
|
||||||
|
|
||||||
def split_mc_pn(pn):
|
|
||||||
if pn.startswith("multiconfig:"):
|
|
||||||
_, mc, pn = pn.split(":", 2)
|
|
||||||
return (mc, pn)
|
|
||||||
return ('', pn)
|
|
||||||
|
|
||||||
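runCommand() above looks the requested command name up on the CommandsSync/CommandsAsync classes and, in read-only mode, refuses any synchronous method that does not carry a readonly attribute. A minimal sketch of that dispatch pattern outside BitBake is shown below; the class and command names are invented for illustration.

class Commands:
    def getValue(self, params):
        return "value of %s" % params[0]
    getValue.readonly = True        # marker attribute, same trick as readonly = True below

    def setValue(self, params):
        print("would modify state:", params)

def run_command(cmds, commandline, ro_only=False):
    name, *params = commandline
    method = getattr(cmds, name, None)
    if method is None:
        return None, "No such command"
    if ro_only and not getattr(method, "readonly", False):
        return None, "Not able to execute not readonly commands in readonly mode"
    return method(params), None

cmds = Commands()
print(run_command(cmds, ["getValue", "MACHINE"], ro_only=True))   # ('value of MACHINE', None)
print(run_command(cmds, ["setValue", "MACHINE"], ro_only=True))   # (None, 'Not able to execute ...')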
class CommandsSync:
|
class CommandsSync:
|
||||||
"""
|
"""
|
||||||
|
@ -158,425 +139,46 @@ class CommandsSync:
|
||||||
"""
|
"""
|
||||||
Trigger cooker 'shutdown' mode
|
Trigger cooker 'shutdown' mode
|
||||||
"""
|
"""
|
||||||
command.cooker.shutdown(False)
|
command.cooker.shutdown()
|
||||||
|
|
||||||
def stateForceShutdown(self, command, params):
|
def stateStop(self, command, params):
|
||||||
"""
|
"""
|
||||||
Stop the cooker
|
Stop the cooker
|
||||||
"""
|
"""
|
||||||
command.cooker.shutdown(True)
|
command.cooker.stop()
|
||||||
|
|
||||||
def getAllKeysWithFlags(self, command, params):
|
def getCmdLineAction(self, command, params):
|
||||||
"""
|
"""
|
||||||
Returns a dump of the global state. Call with
|
Get any command parsed from the commandline
|
||||||
variable flags to be retrieved as params.
|
|
||||||
"""
|
"""
|
||||||
flaglist = params[0]
|
return command.cooker.commandlineAction
|
||||||
return command.cooker.getAllKeysWithFlags(flaglist)
|
|
||||||
getAllKeysWithFlags.readonly = True
|
|
||||||
|
|
||||||
def getVariable(self, command, params):
|
def getVariable(self, command, params):
|
||||||
"""
|
"""
|
||||||
Read the value of a variable from data
|
Read the value of a variable from configuration.data
|
||||||
"""
|
"""
|
||||||
varname = params[0]
|
varname = params[0]
|
||||||
expand = True
|
expand = True
|
||||||
if len(params) > 1:
|
if len(params) > 1:
|
||||||
expand = (params[1] == "True")
|
expand = params[1]
|
||||||
|
|
||||||
return command.cooker.data.getVar(varname, expand)
|
return bb.data.getVar(varname, command.cooker.configuration.data, expand)
|
||||||
getVariable.readonly = True
|
|
||||||
|
|
||||||
def setVariable(self, command, params):
|
def setVariable(self, command, params):
|
||||||
"""
|
"""
|
||||||
Set the value of variable in data
|
Set the value of variable in configuration.data
|
||||||
"""
|
"""
|
||||||
varname = params[0]
|
varname = params[0]
|
||||||
value = str(params[1])
|
value = params[1]
|
||||||
command.cooker.extraconfigdata[varname] = value
|
bb.data.setVar(varname, value, command.cooker.configuration.data)
|
||||||
command.cooker.data.setVar(varname, value)
|
|
||||||
|
|
||||||
def getSetVariable(self, command, params):
|
def resetCooker(self, command, params):
|
||||||
"""
|
"""
|
||||||
Read the value of a variable from data and set it into the datastore
|
Reset the cooker to its initial state, thus forcing a reparse for
|
||||||
which effectively expands and locks the value.
|
any async command that has the needcache property set to True
|
||||||
"""
|
"""
|
||||||
varname = params[0]
|
command.cooker.reset()
|
||||||
result = self.getVariable(command, params)
|
|
||||||
command.cooker.data.setVar(varname, result)
|
|
||||||
return result
|
|
||||||
|
|
||||||
def setConfig(self, command, params):
|
|
||||||
"""
|
|
||||||
Set the value of variable in configuration
|
|
||||||
"""
|
|
||||||
varname = params[0]
|
|
||||||
value = str(params[1])
|
|
||||||
setattr(command.cooker.configuration, varname, value)
|
|
||||||
|
|
||||||
def enableDataTracking(self, command, params):
|
|
||||||
"""
|
|
||||||
Enable history tracking for variables
|
|
||||||
"""
|
|
||||||
command.cooker.enableDataTracking()
|
|
||||||
|
|
||||||
def disableDataTracking(self, command, params):
|
|
||||||
"""
|
|
||||||
Disable history tracking for variables
|
|
||||||
"""
|
|
||||||
command.cooker.disableDataTracking()
|
|
||||||
|
|
||||||
def setPrePostConfFiles(self, command, params):
|
|
||||||
prefiles = params[0].split()
|
|
||||||
postfiles = params[1].split()
|
|
||||||
command.cooker.configuration.prefile = prefiles
|
|
||||||
command.cooker.configuration.postfile = postfiles
|
|
||||||
setPrePostConfFiles.needconfig = False
|
|
||||||
|
|
||||||
def getCpuCount(self, command, params):
|
|
||||||
"""
|
|
||||||
Get the CPU count on the bitbake server
|
|
||||||
"""
|
|
||||||
return bb.utils.cpu_count()
|
|
||||||
getCpuCount.readonly = True
|
|
||||||
getCpuCount.needconfig = False
|
|
||||||
|
|
||||||
def matchFile(self, command, params):
|
|
||||||
fMatch = params[0]
|
|
||||||
return command.cooker.matchFile(fMatch)
|
|
||||||
matchFile.needconfig = False
|
|
||||||
|
|
||||||
def generateNewImage(self, command, params):
|
|
||||||
image = params[0]
|
|
||||||
base_image = params[1]
|
|
||||||
package_queue = params[2]
|
|
||||||
timestamp = params[3]
|
|
||||||
description = params[4]
|
|
||||||
return command.cooker.generateNewImage(image, base_image,
|
|
||||||
package_queue, timestamp, description)
|
|
||||||
|
|
||||||
def ensureDir(self, command, params):
|
|
||||||
directory = params[0]
|
|
||||||
bb.utils.mkdirhier(directory)
|
|
||||||
ensureDir.needconfig = False
|
|
||||||
|
|
||||||
def setVarFile(self, command, params):
|
|
||||||
"""
|
|
||||||
Save a variable in a file; used for saving in a configuration file
|
|
||||||
"""
|
|
||||||
var = params[0]
|
|
||||||
val = params[1]
|
|
||||||
default_file = params[2]
|
|
||||||
op = params[3]
|
|
||||||
command.cooker.modifyConfigurationVar(var, val, default_file, op)
|
|
||||||
setVarFile.needconfig = False
|
|
||||||
|
|
||||||
def removeVarFile(self, command, params):
|
|
||||||
"""
|
|
||||||
Remove a variable declaration from a file
|
|
||||||
"""
|
|
||||||
var = params[0]
|
|
||||||
command.cooker.removeConfigurationVar(var)
|
|
||||||
removeVarFile.needconfig = False
|
|
||||||
|
|
||||||
def createConfigFile(self, command, params):
|
|
||||||
"""
|
|
||||||
Create an extra configuration file
|
|
||||||
"""
|
|
||||||
name = params[0]
|
|
||||||
command.cooker.createConfigFile(name)
|
|
||||||
createConfigFile.needconfig = False
|
|
||||||
|
|
||||||
def setEventMask(self, command, params):
|
|
||||||
handlerNum = params[0]
|
|
||||||
llevel = params[1]
|
|
||||||
debug_domains = params[2]
|
|
||||||
mask = params[3]
|
|
||||||
return bb.event.set_UIHmask(handlerNum, llevel, debug_domains, mask)
|
|
||||||
setEventMask.needconfig = False
|
|
||||||
setEventMask.readonly = True
|
|
||||||
|
|
||||||
def setFeatures(self, command, params):
|
|
||||||
"""
|
|
||||||
Set the cooker features to include the passed list of features
|
|
||||||
"""
|
|
||||||
features = params[0]
|
|
||||||
command.cooker.setFeatures(features)
|
|
||||||
setFeatures.needconfig = False
|
|
||||||
# although we change the internal state of the cooker, this is transparent since
|
|
||||||
# we always take and leave the cooker in state.initial
|
|
||||||
setFeatures.readonly = True
|
|
||||||
|
|
||||||
def updateConfig(self, command, params):
|
|
||||||
options = params[0]
|
|
||||||
environment = params[1]
|
|
||||||
cmdline = params[2]
|
|
||||||
command.cooker.updateConfigOpts(options, environment, cmdline)
|
|
||||||
updateConfig.needconfig = False
|
|
||||||
|
|
||||||
def parseConfiguration(self, command, params):
|
|
||||||
"""Instruct bitbake to parse its configuration
|
|
||||||
NOTE: it is only necessary to call this if you aren't calling any normal action
|
|
||||||
(otherwise parsing is taken care of automatically)
|
|
||||||
"""
|
|
||||||
command.cooker.parseConfiguration()
|
|
||||||
parseConfiguration.needconfig = False
|
|
||||||
|
|
||||||
def getLayerPriorities(self, command, params):
|
|
||||||
ret = []
|
|
||||||
# regex objects cannot be marshalled by xmlrpc
|
|
||||||
for collection, pattern, regex, pri in command.cooker.bbfile_config_priorities:
|
|
||||||
ret.append((collection, pattern, regex.pattern, pri))
|
|
||||||
return ret
|
|
||||||
getLayerPriorities.readonly = True
|
|
||||||
|
|
||||||
def getRecipes(self, command, params):
|
|
||||||
try:
|
|
||||||
mc = params[0]
|
|
||||||
except IndexError:
|
|
||||||
mc = ''
|
|
||||||
return list(command.cooker.recipecaches[mc].pkg_pn.items())
|
|
||||||
getRecipes.readonly = True
|
|
||||||
|
|
||||||
def getRecipeDepends(self, command, params):
|
|
||||||
try:
|
|
||||||
mc = params[0]
|
|
||||||
except IndexError:
|
|
||||||
mc = ''
|
|
||||||
return list(command.cooker.recipecaches[mc].deps.items())
|
|
||||||
getRecipeDepends.readonly = True
|
|
||||||
|
|
||||||
def getRecipeVersions(self, command, params):
|
|
||||||
try:
|
|
||||||
mc = params[0]
|
|
||||||
except IndexError:
|
|
||||||
mc = ''
|
|
||||||
return command.cooker.recipecaches[mc].pkg_pepvpr
|
|
||||||
getRecipeVersions.readonly = True
|
|
||||||
|
|
||||||
def getRuntimeDepends(self, command, params):
|
|
||||||
ret = []
|
|
||||||
try:
|
|
||||||
mc = params[0]
|
|
||||||
except IndexError:
|
|
||||||
mc = ''
|
|
||||||
rundeps = command.cooker.recipecaches[mc].rundeps
|
|
||||||
for key, value in rundeps.items():
|
|
||||||
if isinstance(value, defaultdict):
|
|
||||||
value = dict(value)
|
|
||||||
ret.append((key, value))
|
|
||||||
return ret
|
|
||||||
getRuntimeDepends.readonly = True
|
|
||||||
|
|
||||||
def getRuntimeRecommends(self, command, params):
|
|
||||||
ret = []
|
|
||||||
try:
|
|
||||||
mc = params[0]
|
|
||||||
except IndexError:
|
|
||||||
mc = ''
|
|
||||||
runrecs = command.cooker.recipecaches[mc].runrecs
|
|
||||||
for key, value in runrecs.items():
|
|
||||||
if isinstance(value, defaultdict):
|
|
||||||
value = dict(value)
|
|
||||||
ret.append((key, value))
|
|
||||||
return ret
|
|
||||||
getRuntimeRecommends.readonly = True
|
|
||||||
|
|
||||||
def getRecipeInherits(self, command, params):
|
|
||||||
try:
|
|
||||||
mc = params[0]
|
|
||||||
except IndexError:
|
|
||||||
mc = ''
|
|
||||||
return command.cooker.recipecaches[mc].inherits
|
|
||||||
getRecipeInherits.readonly = True
|
|
||||||
|
|
||||||
def getBbFilePriority(self, command, params):
|
|
||||||
try:
|
|
||||||
mc = params[0]
|
|
||||||
except IndexError:
|
|
||||||
mc = ''
|
|
||||||
return command.cooker.recipecaches[mc].bbfile_priority
|
|
||||||
getBbFilePriority.readonly = True
|
|
||||||
|
|
||||||
def getDefaultPreference(self, command, params):
|
|
||||||
try:
|
|
||||||
mc = params[0]
|
|
||||||
except IndexError:
|
|
||||||
mc = ''
|
|
||||||
return command.cooker.recipecaches[mc].pkg_dp
|
|
||||||
getDefaultPreference.readonly = True
|
|
||||||
|
|
||||||
def getSkippedRecipes(self, command, params):
|
|
||||||
# Return list sorted by reverse priority order
|
|
||||||
import bb.cache
|
|
||||||
skipdict = OrderedDict(sorted(command.cooker.skiplist.items(),
|
|
||||||
key=lambda x: (-command.cooker.collection.calc_bbfile_priority(bb.cache.virtualfn2realfn(x[0])[0]), x[0])))
|
|
||||||
return list(skipdict.items())
|
|
||||||
getSkippedRecipes.readonly = True
|
|
||||||
|
|
||||||
def getOverlayedRecipes(self, command, params):
|
|
||||||
return list(command.cooker.collection.overlayed.items())
|
|
||||||
getOverlayedRecipes.readonly = True
|
|
||||||
|
|
||||||
def getFileAppends(self, command, params):
|
|
||||||
fn = params[0]
|
|
||||||
return command.cooker.collection.get_file_appends(fn)
|
|
||||||
getFileAppends.readonly = True
|
|
||||||
|
|
||||||
def getAllAppends(self, command, params):
|
|
||||||
return command.cooker.collection.bbappends
|
|
||||||
getAllAppends.readonly = True
|
|
||||||
|
|
||||||
def findProviders(self, command, params):
|
|
||||||
return command.cooker.findProviders()
|
|
||||||
findProviders.readonly = True
|
|
||||||
|
|
||||||
def findBestProvider(self, command, params):
|
|
||||||
(mc, pn) = split_mc_pn(params[0])
|
|
||||||
return command.cooker.findBestProvider(pn, mc)
|
|
||||||
findBestProvider.readonly = True
|
|
||||||
|
|
||||||
def allProviders(self, command, params):
|
|
||||||
try:
|
|
||||||
mc = params[0]
|
|
||||||
except IndexError:
|
|
||||||
mc = ''
|
|
||||||
return list(bb.providers.allProviders(command.cooker.recipecaches[mc]).items())
|
|
||||||
allProviders.readonly = True
|
|
||||||
|
|
||||||
def getRuntimeProviders(self, command, params):
|
|
||||||
rprovide = params[0]
|
|
||||||
try:
|
|
||||||
mc = params[1]
|
|
||||||
except IndexError:
|
|
||||||
mc = ''
|
|
||||||
all_p = bb.providers.getRuntimeProviders(command.cooker.recipecaches[mc], rprovide)
|
|
||||||
if all_p:
|
|
||||||
best = bb.providers.filterProvidersRunTime(all_p, rprovide,
|
|
||||||
command.cooker.data,
|
|
||||||
command.cooker.recipecaches[mc])[0][0]
|
|
||||||
else:
|
|
||||||
best = None
|
|
||||||
return all_p, best
|
|
||||||
getRuntimeProviders.readonly = True
|
|
||||||
|
|
||||||
def dataStoreConnectorFindVar(self, command, params):
|
|
||||||
dsindex = params[0]
|
|
||||||
name = params[1]
|
|
||||||
datastore = command.remotedatastores[dsindex]
|
|
||||||
value, overridedata = datastore._findVar(name)
|
|
||||||
|
|
||||||
if value:
|
|
||||||
content = value.get('_content', None)
|
|
||||||
if isinstance(content, bb.data_smart.DataSmart):
|
|
||||||
# Value is a datastore (e.g. BB_ORIGENV) - need to handle this carefully
|
|
||||||
idx = command.remotedatastores.check_store(content, True)
|
|
||||||
return {'_content': DataStoreConnectionHandle(idx),
|
|
||||||
'_connector_origtype': 'DataStoreConnectionHandle',
|
|
||||||
'_connector_overrides': overridedata}
|
|
||||||
elif isinstance(content, set):
|
|
||||||
return {'_content': list(content),
|
|
||||||
'_connector_origtype': 'set',
|
|
||||||
'_connector_overrides': overridedata}
|
|
||||||
else:
|
|
||||||
value['_connector_overrides'] = overridedata
|
|
||||||
else:
|
|
||||||
value = {}
|
|
||||||
value['_connector_overrides'] = overridedata
|
|
||||||
return value
|
|
||||||
dataStoreConnectorFindVar.readonly = True
|
|
||||||
|
|
||||||
def dataStoreConnectorGetKeys(self, command, params):
|
|
||||||
dsindex = params[0]
|
|
||||||
datastore = command.remotedatastores[dsindex]
|
|
||||||
return list(datastore.keys())
|
|
||||||
dataStoreConnectorGetKeys.readonly = True
|
|
||||||
|
|
||||||
def dataStoreConnectorGetVarHistory(self, command, params):
|
|
||||||
dsindex = params[0]
|
|
||||||
name = params[1]
|
|
||||||
datastore = command.remotedatastores[dsindex]
|
|
||||||
return datastore.varhistory.variable(name)
|
|
||||||
dataStoreConnectorGetVarHistory.readonly = True
|
|
||||||
|
|
||||||
def dataStoreConnectorExpandPythonRef(self, command, params):
|
|
||||||
config_data_dict = params[0]
|
|
||||||
varname = params[1]
|
|
||||||
expr = params[2]
|
|
||||||
|
|
||||||
config_data = command.remotedatastores.receive_datastore(config_data_dict)
|
|
||||||
|
|
||||||
varparse = bb.data_smart.VariableParse(varname, config_data)
|
|
||||||
return varparse.python_sub(expr)
|
|
||||||
|
|
||||||
def dataStoreConnectorRelease(self, command, params):
|
|
||||||
dsindex = params[0]
|
|
||||||
if dsindex <= 0:
|
|
||||||
raise CommandError('dataStoreConnectorRelease: invalid index %d' % dsindex)
|
|
||||||
command.remotedatastores.release(dsindex)
|
|
||||||
|
|
||||||
def dataStoreConnectorSetVarFlag(self, command, params):
|
|
||||||
dsindex = params[0]
|
|
||||||
name = params[1]
|
|
||||||
flag = params[2]
|
|
||||||
value = params[3]
|
|
||||||
datastore = command.remotedatastores[dsindex]
|
|
||||||
datastore.setVarFlag(name, flag, value)
|
|
||||||
|
|
||||||
def dataStoreConnectorDelVar(self, command, params):
|
|
||||||
dsindex = params[0]
|
|
||||||
name = params[1]
|
|
||||||
datastore = command.remotedatastores[dsindex]
|
|
||||||
if len(params) > 2:
|
|
||||||
flag = params[2]
|
|
||||||
datastore.delVarFlag(name, flag)
|
|
||||||
else:
|
|
||||||
datastore.delVar(name)
|
|
||||||
|
|
||||||
def dataStoreConnectorRenameVar(self, command, params):
|
|
||||||
dsindex = params[0]
|
|
||||||
name = params[1]
|
|
||||||
newname = params[2]
|
|
||||||
datastore = command.remotedatastores[dsindex]
|
|
||||||
datastore.renameVar(name, newname)
|
|
||||||
|
|
||||||
def parseRecipeFile(self, command, params):
|
|
||||||
"""
|
|
||||||
Parse the specified recipe file (with or without bbappends)
|
|
||||||
and return a datastore object representing the environment
|
|
||||||
for the recipe.
|
|
||||||
"""
|
|
||||||
fn = params[0]
|
|
||||||
appends = params[1]
|
|
||||||
appendlist = params[2]
|
|
||||||
if len(params) > 3:
|
|
||||||
config_data_dict = params[3]
|
|
||||||
config_data = command.remotedatastores.receive_datastore(config_data_dict)
|
|
||||||
else:
|
|
||||||
config_data = None
|
|
||||||
|
|
||||||
if appends:
|
|
||||||
if appendlist is not None:
|
|
||||||
appendfiles = appendlist
|
|
||||||
else:
|
|
||||||
appendfiles = command.cooker.collection.get_file_appends(fn)
|
|
||||||
else:
|
|
||||||
appendfiles = []
|
|
||||||
# We are calling bb.cache locally here rather than on the server,
|
|
||||||
# but that's OK because it doesn't actually need anything from
|
|
||||||
# the server barring the global datastore (which we have a remote
|
|
||||||
# version of)
|
|
||||||
if config_data:
|
|
||||||
# We have to use a different function here if we're passing in a datastore
|
|
||||||
# NOTE: we took a copy above, so we don't do it here again
|
|
||||||
envdata = bb.cache.parse_recipe(config_data, fn, appendfiles)['']
|
|
||||||
else:
|
|
||||||
# Use the standard path
|
|
||||||
parser = bb.cache.NoCache(command.cooker.databuilder)
|
|
||||||
envdata = parser.loadDataFull(fn, appendfiles)
|
|
||||||
idx = command.remotedatastores.store(envdata)
|
|
||||||
return DataStoreConnectionHandle(idx)
|
|
||||||
parseRecipeFile.readonly = True
|
|
||||||
|
|
||||||
class CommandsAsync:
|
class CommandsAsync:
|
||||||
"""
|
"""
|
||||||
|
@ -591,12 +193,8 @@ class CommandsAsync:
|
||||||
"""
|
"""
|
||||||
bfile = params[0]
|
bfile = params[0]
|
||||||
task = params[1]
|
task = params[1]
|
||||||
if len(params) > 2:
|
|
||||||
hidewarning = params[2]
|
|
||||||
else:
|
|
||||||
hidewarning = False
|
|
||||||
|
|
||||||
command.cooker.buildFile(bfile, task, hidewarning)
|
command.cooker.buildFile(bfile, task)
|
||||||
buildFile.needcache = False
|
buildFile.needcache = False
|
||||||
|
|
||||||
def buildTargets(self, command, params):
|
def buildTargets(self, command, params):
|
||||||
|
@ -640,23 +238,15 @@ class CommandsAsync:
|
||||||
klass) rather than generating a tree for all packages.
|
klass) rather than generating a tree for all packages.
|
||||||
"""
|
"""
|
||||||
klass = params[0]
|
klass = params[0]
|
||||||
pkg_list = params[1]
|
if len(params) > 1:
|
||||||
|
pkg_list = params[1]
|
||||||
|
else:
|
||||||
|
pkg_list = []
|
||||||
|
|
||||||
command.cooker.generateTargetsTree(klass, pkg_list)
|
command.cooker.generateTargetsTree(klass, pkg_list)
|
||||||
command.finishAsyncCommand()
|
command.finishAsyncCommand()
|
||||||
generateTargetsTree.needcache = True
|
generateTargetsTree.needcache = True
|
||||||
|
|
||||||
def findCoreBaseFiles(self, command, params):
|
|
||||||
"""
|
|
||||||
Find certain files in COREBASE directory. i.e. Layers
|
|
||||||
"""
|
|
||||||
subdir = params[0]
|
|
||||||
filename = params[1]
|
|
||||||
|
|
||||||
command.cooker.findCoreBaseFiles(subdir, filename)
|
|
||||||
command.finishAsyncCommand()
|
|
||||||
findCoreBaseFiles.needcache = False
|
|
||||||
|
|
||||||
def findConfigFiles(self, command, params):
|
def findConfigFiles(self, command, params):
|
||||||
"""
|
"""
|
||||||
Find config files which provide appropriate values
|
Find config files which provide appropriate values
|
||||||
|
@ -666,7 +256,7 @@ class CommandsAsync:
|
||||||
|
|
||||||
command.cooker.findConfigFiles(varname)
|
command.cooker.findConfigFiles(varname)
|
||||||
command.finishAsyncCommand()
|
command.finishAsyncCommand()
|
||||||
findConfigFiles.needcache = False
|
findConfigFiles.needcache = True
|
||||||
|
|
||||||
def findFilesMatchingInDir(self, command, params):
|
def findFilesMatchingInDir(self, command, params):
|
||||||
"""
|
"""
|
||||||
|
@ -678,7 +268,7 @@ class CommandsAsync:
|
||||||
|
|
||||||
command.cooker.findFilesMatchingInDir(pattern, directory)
|
command.cooker.findFilesMatchingInDir(pattern, directory)
|
||||||
command.finishAsyncCommand()
|
command.finishAsyncCommand()
|
||||||
findFilesMatchingInDir.needcache = False
|
findFilesMatchingInDir.needcache = True
|
||||||
|
|
||||||
def findConfigFilePath(self, command, params):
|
def findConfigFilePath(self, command, params):
|
||||||
"""
|
"""
|
||||||
|
@ -728,39 +318,20 @@ class CommandsAsync:
|
||||||
command.finishAsyncCommand()
|
command.finishAsyncCommand()
|
||||||
parseFiles.needcache = True
|
parseFiles.needcache = True
|
||||||
|
|
||||||
|
def reparseFiles(self, command, params):
|
||||||
|
"""
|
||||||
|
Reparse .bb files
|
||||||
|
"""
|
||||||
|
command.cooker.reparseFiles()
|
||||||
|
command.finishAsyncCommand()
|
||||||
|
reparseFiles.needcache = True
|
||||||
|
|
||||||
def compareRevisions(self, command, params):
|
def compareRevisions(self, command, params):
|
||||||
"""
|
"""
|
||||||
Parse the .bb files
|
Parse the .bb files
|
||||||
"""
|
"""
|
||||||
if bb.fetch.fetcher_compare_revisions(command.cooker.data):
|
if bb.fetch.fetcher_compare_revisions(command.cooker.configuration.data):
|
||||||
command.finishAsyncCommand(code=1)
|
command.finishAsyncCommand(code=1)
|
||||||
else:
|
else:
|
||||||
command.finishAsyncCommand()
|
command.finishAsyncCommand()
|
||||||
compareRevisions.needcache = True
|
compareRevisions.needcache = True
|
||||||
|
|
||||||
def triggerEvent(self, command, params):
|
|
||||||
"""
|
|
||||||
Trigger a certain event
|
|
||||||
"""
|
|
||||||
event = params[0]
|
|
||||||
bb.event.fire(eval(event), command.cooker.data)
|
|
||||||
command.currentAsyncCommand = None
|
|
||||||
triggerEvent.needcache = False
|
|
||||||
|
|
||||||
def resetCooker(self, command, params):
|
|
||||||
"""
|
|
||||||
Reset the cooker to its initial state, thus forcing a reparse for
|
|
||||||
any async command that has the needcache property set to True
|
|
||||||
"""
|
|
||||||
command.cooker.reset()
|
|
||||||
command.finishAsyncCommand()
|
|
||||||
resetCooker.needcache = False
|
|
||||||
|
|
||||||
def clientComplete(self, command, params):
|
|
||||||
"""
|
|
||||||
Do the right thing when the controlling client exits
|
|
||||||
"""
|
|
||||||
command.cooker.clientComplete()
|
|
||||||
command.finishAsyncCommand()
|
|
||||||
clientComplete.needcache = False
|
|
||||||
|
|
||||||
|
|
|
@@ -1,6 +1,28 @@
 """Code pulled from future python versions, here for compatibility"""

-from collections import MutableMapping, KeysView, ValuesView, ItemsView, OrderedDict
-from functools import total_ordering
+def total_ordering(cls):
+    """Class decorator that fills in missing ordering methods"""
+    convert = {
+        '__lt__': [('__gt__', lambda self, other: other < self),
+                   ('__le__', lambda self, other: not other < self),
+                   ('__ge__', lambda self, other: not self < other)],
+        '__le__': [('__ge__', lambda self, other: other <= self),
+                   ('__lt__', lambda self, other: not other <= self),
+                   ('__gt__', lambda self, other: not self <= other)],
+        '__gt__': [('__lt__', lambda self, other: other > self),
+                   ('__ge__', lambda self, other: not other > self),
+                   ('__le__', lambda self, other: not self > other)],
+        '__ge__': [('__le__', lambda self, other: other >= self),
+                   ('__gt__', lambda self, other: not other >= self),
+                   ('__lt__', lambda self, other: not self >= other)]
+    }
+    roots = set(dir(cls)) & set(convert)
+    if not roots:
+        raise ValueError('must define at least one ordering operation: < > <= >=')
+    root = max(roots)       # prefer __lt__ to __le__ to __gt__ to __ge__
+    for opname, opfunc in convert[root]:
+        if opname not in roots:
+            opfunc.__name__ = opname
+            opfunc.__doc__ = getattr(int, opname).__doc__
+            setattr(cls, opname, opfunc)
+    return cls
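The added block hand-rolls a total_ordering decorator for older Pythons: define one rich comparison and the decorator derives the rest. On any recent Python the same decorator comes straight from functools, for example:

from functools import total_ordering

@total_ordering
class Version:
    def __init__(self, num):
        self.num = num
    def __eq__(self, other):
        return self.num == other.num
    def __lt__(self, other):
        return self.num < other.num

print(Version(2) > Version(1))    # True, __gt__ filled in by the decorator
print(Version(1) >= Version(1))   # True, __ge__ filled in as well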
File diff suppressed because it is too large
@@ -1,381 +0,0 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import logging
import os
import re
import sys
from functools import wraps
import bb
from bb import data
import bb.parse

logger = logging.getLogger("BitBake")
parselog = logging.getLogger("BitBake.Parsing")

class ConfigParameters(object):
    def __init__(self, argv=sys.argv):
        self.options, targets = self.parseCommandLine(argv)
        self.environment = self.parseEnvironment()

        self.options.pkgs_to_build = targets or []

        self.options.tracking = False
        if hasattr(self.options, "show_environment") and self.options.show_environment:
            self.options.tracking = True

        for key, val in self.options.__dict__.items():
            setattr(self, key, val)

    def parseCommandLine(self, argv=sys.argv):
        raise Exception("Caller must implement commandline option parsing")

    def parseEnvironment(self):
        return os.environ.copy()

    def updateFromServer(self, server):
        if not self.options.cmd:
            defaulttask, error = server.runCommand(["getVariable", "BB_DEFAULT_TASK"])
            if error:
                raise Exception("Unable to get the value of BB_DEFAULT_TASK from the server: %s" % error)
            self.options.cmd = defaulttask or "build"
        _, error = server.runCommand(["setConfig", "cmd", self.options.cmd])
        if error:
            raise Exception("Unable to set configuration option 'cmd' on the server: %s" % error)

        if not self.options.pkgs_to_build:
            bbpkgs, error = server.runCommand(["getVariable", "BBTARGETS"])
            if error:
                raise Exception("Unable to get the value of BBTARGETS from the server: %s" % error)
            if bbpkgs:
                self.options.pkgs_to_build.extend(bbpkgs.split())

    def updateToServer(self, server, environment):
        options = {}
        for o in ["abort", "tryaltconfigs", "force", "invalidate_stamp",
                  "verbose", "debug", "dry_run", "dump_signatures",
                  "debug_domains", "extra_assume_provided", "profile",
                  "prefile", "postfile"]:
            options[o] = getattr(self.options, o)

        ret, error = server.runCommand(["updateConfig", options, environment, sys.argv])
        if error:
            raise Exception("Unable to update the server configuration with local parameters: %s" % error)

    def parseActions(self):
        # Parse any commandline into actions
        action = {'action':None, 'msg':None}
        if self.options.show_environment:
            if 'world' in self.options.pkgs_to_build:
                action['msg'] = "'world' is not a valid target for --environment."
            elif 'universe' in self.options.pkgs_to_build:
                action['msg'] = "'universe' is not a valid target for --environment."
            elif len(self.options.pkgs_to_build) > 1:
                action['msg'] = "Only one target can be used with the --environment option."
            elif self.options.buildfile and len(self.options.pkgs_to_build) > 0:
                action['msg'] = "No target should be used with the --environment and --buildfile options."
            elif len(self.options.pkgs_to_build) > 0:
                action['action'] = ["showEnvironmentTarget", self.options.pkgs_to_build]
            else:
                action['action'] = ["showEnvironment", self.options.buildfile]
        elif self.options.buildfile is not None:
            action['action'] = ["buildFile", self.options.buildfile, self.options.cmd]
        elif self.options.revisions_changed:
            action['action'] = ["compareRevisions"]
        elif self.options.show_versions:
            action['action'] = ["showVersions"]
        elif self.options.parse_only:
            action['action'] = ["parseFiles"]
        elif self.options.dot_graph:
            if self.options.pkgs_to_build:
                action['action'] = ["generateDotGraph", self.options.pkgs_to_build, self.options.cmd]
            else:
                action['msg'] = "Please specify a package name for dependency graph generation."
        else:
            if self.options.pkgs_to_build:
                action['action'] = ["buildTargets", self.options.pkgs_to_build, self.options.cmd]
            else:
                #action['msg'] = "Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information."
                action = None
        self.options.initialaction = action
        return action

class CookerConfiguration(object):
    """
    Manages build options and configurations for one run
    """

    def __init__(self):
        self.debug_domains = []
        self.extra_assume_provided = []
        self.prefile = []
        self.postfile = []
        self.prefile_server = []
        self.postfile_server = []
        self.debug = 0
        self.cmd = None
        self.abort = True
        self.force = False
        self.profile = False
        self.nosetscene = False
        self.setsceneonly = False
        self.invalidate_stamp = False
        self.dump_signatures = []
        self.dry_run = False
        self.tracking = False
        self.interface = []
        self.writeeventlog = False
        self.server_only = False
        self.limited_deps = False
        self.runall = None

        self.env = {}

    def setConfigParameters(self, parameters):
        for key in self.__dict__.keys():
            if key in parameters.options.__dict__:
                setattr(self, key, parameters.options.__dict__[key])
        self.env = parameters.environment.copy()
        self.tracking = parameters.tracking

    def setServerRegIdleCallback(self, srcb):
        self.server_register_idlecallback = srcb

    def __getstate__(self):
        state = {}
        for key in self.__dict__.keys():
            if key == "server_register_idlecallback":
                state[key] = None
            else:
                state[key] = getattr(self, key)
        return state

    def __setstate__(self,state):
        for k in state:
            setattr(self, k, state[k])


def catch_parse_error(func):
    """Exception handling bits for our parsing"""
    @wraps(func)
    def wrapped(fn, *args):
        try:
            return func(fn, *args)
        except IOError as exc:
            import traceback
            parselog.critical(traceback.format_exc())
            parselog.critical("Unable to parse %s: %s" % (fn, exc))
            sys.exit(1)
        except bb.data_smart.ExpansionError as exc:
            import traceback

            bbdir = os.path.dirname(__file__) + os.sep
            exc_class, exc, tb = sys.exc_info()
            for tb in iter(lambda: tb.tb_next, None):
                # Skip frames in bitbake itself, we only want the metadata
                fn, _, _, _ = traceback.extract_tb(tb, 1)[0]
                if not fn.startswith(bbdir):
                    break
            parselog.critical("Unable to parse %s" % fn, exc_info=(exc_class, exc, tb))
            sys.exit(1)
        except bb.parse.ParseError as exc:
            parselog.critical(str(exc))
            sys.exit(1)
    return wrapped

@catch_parse_error
def parse_config_file(fn, data, include=True):
    return bb.parse.handle(fn, data, include)

@catch_parse_error
def _inherit(bbclass, data):
    bb.parse.BBHandler.inherit(bbclass, "configuration INHERITs", 0, data)
    return data

def findConfigFile(configfile, data):
    search = []
    bbpath = data.getVar("BBPATH")
    if bbpath:
        for i in bbpath.split(":"):
            search.append(os.path.join(i, "conf", configfile))
    path = os.getcwd()
    while path != "/":
        search.append(os.path.join(path, "conf", configfile))
        path, _ = os.path.split(path)

    for i in search:
        if os.path.exists(i):
            return i

    return None

class CookerDataBuilder(object):

    def __init__(self, cookercfg, worker = False):

        self.prefiles = cookercfg.prefile
        self.postfiles = cookercfg.postfile
        self.tracking = cookercfg.tracking

        bb.utils.set_context(bb.utils.clean_context())
        bb.event.set_class_handlers(bb.event.clean_class_handlers())
        self.basedata = bb.data.init()
        if self.tracking:
            self.basedata.enableTracking()

        # Keep a datastore of the initial environment variables and their
        # values from when BitBake was launched to enable child processes
        # to use environment variables which have been cleaned from the
        # BitBake processes env
        self.savedenv = bb.data.init()
        for k in cookercfg.env:
            self.savedenv.setVar(k, cookercfg.env[k])

        filtered_keys = bb.utils.approved_variables()
        bb.data.inheritFromOS(self.basedata, self.savedenv, filtered_keys)
        self.basedata.setVar("BB_ORIGENV", self.savedenv)

        if worker:
            self.basedata.setVar("BB_WORKERCONTEXT", "1")

        self.data = self.basedata
        self.mcdata = {}

    def parseBaseConfiguration(self):
        try:
            bb.parse.init_parser(self.basedata)
            self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles)

            if self.data.getVar("BB_WORKERCONTEXT", False) is None:
                bb.fetch.fetcher_init(self.data)
            bb.codeparser.parser_cache_init(self.data)

            bb.event.fire(bb.event.ConfigParsed(), self.data)

            reparse_cnt = 0
            while self.data.getVar("BB_INVALIDCONF", False) is True:
                if reparse_cnt > 20:
                    logger.error("Configuration has been re-parsed over 20 times, "
                                 "breaking out of the loop...")
                    raise Exception("Too deep config re-parse loop. Check locations where "
                                    "BB_INVALIDCONF is being set (ConfigParsed event handlers)")
                self.data.setVar("BB_INVALIDCONF", False)
                self.data = self.parseConfigurationFiles(self.prefiles, self.postfiles)
                reparse_cnt += 1
                bb.event.fire(bb.event.ConfigParsed(), self.data)

            bb.parse.init_parser(self.data)
            self.data_hash = self.data.get_hash()
            self.mcdata[''] = self.data

            multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split()
            for config in multiconfig:
                mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config)
                bb.event.fire(bb.event.ConfigParsed(), mcdata)
                self.mcdata[config] = mcdata

        except (SyntaxError, bb.BBHandledException):
            raise bb.BBHandledException
        except bb.data_smart.ExpansionError as e:
            logger.error(str(e))
            raise bb.BBHandledException
        except Exception:
            logger.exception("Error parsing configuration files")
            raise bb.BBHandledException

    def _findLayerConf(self, data):
        return findConfigFile("bblayers.conf", data)

    def parseConfigurationFiles(self, prefiles, postfiles, mc = "default"):
        data = bb.data.createCopy(self.basedata)
        data.setVar("BB_CURRENT_MC", mc)

        # Parse files for loading *before* bitbake.conf and any includes
        for f in prefiles:
            data = parse_config_file(f, data)

        layerconf = self._findLayerConf(data)
        if layerconf:
            parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
            # By definition bblayers.conf is in conf/ of TOPDIR.
            # We may have been called with cwd somewhere else so reset TOPDIR
            data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf)))
            data = parse_config_file(layerconf, data)

            layers = (data.getVar('BBLAYERS') or "").split()

            data = bb.data.createCopy(data)
            approved = bb.utils.approved_variables()
            for layer in layers:
                if not os.path.isdir(layer):
                    parselog.critical("Layer directory '%s' does not exist! "
                                      "Please check BBLAYERS in %s" % (layer, layerconf))
                    sys.exit(1)
                parselog.debug(2, "Adding layer %s", layer)
                if 'HOME' in approved and '~' in layer:
                    layer = os.path.expanduser(layer)
                if layer.endswith('/'):
                    layer = layer.rstrip('/')
                data.setVar('LAYERDIR', layer)
                data.setVar('LAYERDIR_RE', re.escape(layer))
                data = parse_config_file(os.path.join(layer, "conf", "layer.conf"), data)
                data.expandVarref('LAYERDIR')
                data.expandVarref('LAYERDIR_RE')

            data.delVar('LAYERDIR_RE')
            data.delVar('LAYERDIR')

        if not data.getVar("BBPATH"):
            msg = "The BBPATH variable is not set"
            if not layerconf:
                msg += (" and bitbake did not find a conf/bblayers.conf file in"
                        " the expected location.\nMaybe you accidentally"
                        " invoked bitbake from the wrong directory?")
            raise SystemExit(msg)

        data = parse_config_file(os.path.join("conf", "bitbake.conf"), data)

        # Parse files for loading *after* bitbake.conf and any includes
        for p in postfiles:
            data = parse_config_file(p, data)

        # Handle any INHERITs and inherit the base class
        bbclasses = ["base"] + (data.getVar('INHERIT') or "").split()
        for bbclass in bbclasses:
            data = _inherit(bbclass, data)

        # Nomally we only register event handlers at the end of parsing .bb files
        # We register any handlers we've found so far here...
        for var in data.getVar('__BBHANDLERS', False) or []:
            handlerfn = data.getVarFlag(var, "filename", False)
            if not handlerfn:
                parselog.critical("Undefined event handler function '%s'" % var)
                sys.exit(1)
            handlerln = int(data.getVarFlag(var, "lineno", False))
            bb.event.register(var, data.getVar(var, False), (data.getVarFlag(var, "eventmask") or "").split(), handlerfn, handlerln)

        data.setVar('BBINCLUDED',bb.parse.get_file_depends(data))

        return data
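For orientation only: the ConfigParameters class in the listing above deliberately leaves parseCommandLine() to the calling front-end. The sketch below shows what a minimal, hypothetical subclass might look like; the real BitBake UIs define a much larger option set, covering everything parseActions() and updateToServer() read.

    # Hypothetical front-end subclass, only a few of the options the real UIs define.
    import optparse
    import sys

    class DemoConfigParameters(ConfigParameters):
        def parseCommandLine(self, argv=sys.argv):
            parser = optparse.OptionParser(usage="%prog [options] [target ...]")
            parser.add_option("-b", "--buildfile", dest="buildfile", default=None)
            parser.add_option("-c", "--cmd", dest="cmd", default=None)
            parser.add_option("-e", "--environment", action="store_true",
                              dest="show_environment", default=False)
            options, targets = parser.parse_args(argv[1:])
            return options, targets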
|
@@ -1,5 +1,5 @@
 """
-Python Daemonizing helper
+Python Deamonizing helper

 Configurable daemon behaviors:

@@ -12,11 +8,8 @@ A failed call to fork() now raises an exception.

 References:
 1) Advanced Programming in the Unix Environment: W. Richard Stevens
-   http://www.apuebook.com/apue3e.html
-2) The Linux Programming Interface: Michael Kerrisk
-   http://man7.org/tlpi/index.html
-3) Unix Programming Frequently Asked Questions:
-   http://www.faqs.org/faqs/unix-faq/programmer/faq/
+2) Unix Programming Frequently Asked Questions:
+   http://www.erlenstar.demon.co.uk/unix/faq_toc.html

 Modified to allow a function to be daemonized and return for
 bitbake use by Richard Purdie
@@ -28,7 +25,7 @@ __version__ = "0.2"

 # Standard Python modules.
 import os    # Miscellaneous OS interfaces.
 import sys   # System-specific parameters and functions.

 # Default daemon parameters.
 # File mode creation mask of the daemon.
@@ -131,7 +128,7 @@ def createDaemon(function, logfile):
 # of methods to accomplish this task. Three are listed below.
 #
 # Try the system configuration variable, SC_OPEN_MAX, to obtain the maximum
-# number of open file descriptors to close. If it doesn't exist, use
+# number of open file descriptors to close. If it doesn't exists, use
 # the default value (configurable).
 #
 # try:
@@ -149,7 +146,7 @@ def createDaemon(function, logfile):
 # OR
 #
 # Use the getrlimit method to retrieve the maximum file descriptor number
-# that can be opened by this process. If there is no limit on the
+# that can be opened by this process. If there is not limit on the
 # resource, use the default value.
 #
 import resource    # Resource usage information.
@@ -178,8 +175,8 @@ def createDaemon(function, logfile):
 # os.dup2(0, 2)    # standard error (2)


-si = open('/dev/null', 'r')
-so = open(logfile, 'w')
+si = file('/dev/null', 'r')
+so = file(logfile, 'w')
 se = so

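A hedged usage sketch for the helper diffed above: createDaemon() forks the current process and runs the supplied callable with its output redirected to the given log file. The server_main function and the log file name are made up for the example.

    import bb.daemonize

    def server_main():
        # the long-running server loop would go here
        pass

    bb.daemonize.createDaemon(server_main, "bitbake-server.log")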
|
|
@@ -6,7 +6,7 @@ BitBake 'Data' implementations
 Functions for interacting with the data structure used by the
 BitBake build tools.

-The expandKeys and update_data are the most expensive
+The expandData and update_data are the most expensive
 operations. At night the cookie monster came by and
 suggested 'give me cookies on setting the variables and
 things will work out'. Taking this suggestion into account
@@ -15,7 +15,7 @@ Analyse von Algorithmen' lecture and the cookie
 monster seems to be right. We will track setVar more carefully
 to have faster update_data and expandKeys operations.

-This is a trade-off between speed and memory again but
+This is a treade-off between speed and memory again but
 the speed is more critical here.
 """

@@ -35,7 +35,7 @@ the speed is more critical here.
 # with this program; if not, write to the Free Software Foundation, Inc.,
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 #
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+#Based on functions from the base bb module, Copyright 2003 Holger Schurig

 import sys, os, re
 if sys.argv[0][-5:] == "pydoc":
@@ -59,7 +59,7 @@ def init():
 def init_db(parent = None):
     """Return a new object representing the Bitbake data,
     optionally based on an existing object"""
-    if parent is not None:
+    if parent:
         return parent.createCopy()
     else:
         return _dict_type()
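One side of the hunk that follows still carries the old module-level accessor helpers (setVar, getVar, setVarFlag and friends) that the other side drops in favour of calling the datastore directly. The sketch below only contrasts the two calling styles; it assumes a DataSmart datastore d, and each half only runs against the matching BitBake generation.

    import bb.data

    d = bb.data.init()

    # newer style: call methods on the datastore object
    d.setVar("FOO", "bar")
    value = d.getVar("FOO")

    # older style (wrappers removed in the newer tree): bb.data module helpers
    bb.data.setVar("FOO", "bar", d)
    value = bb.data.getVar("FOO", d, 1)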
||||||
|
@ -78,6 +78,55 @@ def initVar(var, d):
|
||||||
"""Non-destructive var init for data structure"""
|
"""Non-destructive var init for data structure"""
|
||||||
d.initVar(var)
|
d.initVar(var)
|
||||||
|
|
||||||
|
|
||||||
|
def setVar(var, value, d):
|
||||||
|
"""Set a variable to a given value"""
|
||||||
|
d.setVar(var, value)
|
||||||
|
|
||||||
|
|
||||||
|
def getVar(var, d, exp = 0):
|
||||||
|
"""Gets the value of a variable"""
|
||||||
|
return d.getVar(var, exp)
|
||||||
|
|
||||||
|
|
||||||
|
def renameVar(key, newkey, d):
|
||||||
|
"""Renames a variable from key to newkey"""
|
||||||
|
d.renameVar(key, newkey)
|
||||||
|
|
||||||
|
def delVar(var, d):
|
||||||
|
"""Removes a variable from the data set"""
|
||||||
|
d.delVar(var)
|
||||||
|
|
||||||
|
def setVarFlag(var, flag, flagvalue, d):
|
||||||
|
"""Set a flag for a given variable to a given value"""
|
||||||
|
d.setVarFlag(var, flag, flagvalue)
|
||||||
|
|
||||||
|
def getVarFlag(var, flag, d):
|
||||||
|
"""Gets given flag from given var"""
|
||||||
|
return d.getVarFlag(var, flag)
|
||||||
|
|
||||||
|
def delVarFlag(var, flag, d):
|
||||||
|
"""Removes a given flag from the variable's flags"""
|
||||||
|
d.delVarFlag(var, flag)
|
||||||
|
|
||||||
|
def setVarFlags(var, flags, d):
|
||||||
|
"""Set the flags for a given variable
|
||||||
|
|
||||||
|
Note:
|
||||||
|
setVarFlags will not clear previous
|
||||||
|
flags. Think of this method as
|
||||||
|
addVarFlags
|
||||||
|
"""
|
||||||
|
d.setVarFlags(var, flags)
|
||||||
|
|
||||||
|
def getVarFlags(var, d):
|
||||||
|
"""Gets a variable's flags"""
|
||||||
|
return d.getVarFlags(var)
|
||||||
|
|
||||||
|
def delVarFlags(var, d):
|
||||||
|
"""Removes a variable's flags"""
|
||||||
|
d.delVarFlags(var)
|
||||||
|
|
||||||
def keys(d):
|
def keys(d):
|
||||||
"""Return a list of keys in d"""
|
"""Return a list of keys in d"""
|
||||||
return d.keys()
|
return d.keys()
|
||||||
|
@ -95,7 +144,7 @@ def expandKeys(alterdata, readdata = None):
|
||||||
readdata = alterdata
|
readdata = alterdata
|
||||||
|
|
||||||
todolist = {}
|
todolist = {}
|
||||||
for key in alterdata:
|
for key in keys(alterdata):
|
||||||
if not '${' in key:
|
if not '${' in key:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
@ -106,14 +155,10 @@ def expandKeys(alterdata, readdata = None):
|
||||||
|
|
||||||
# These two for loops are split for performance to maximise the
|
# These two for loops are split for performance to maximise the
|
||||||
# usefulness of the expand cache
|
# usefulness of the expand cache
|
||||||
for key in sorted(todolist):
|
|
||||||
|
for key in todolist:
|
||||||
ekey = todolist[key]
|
ekey = todolist[key]
|
||||||
newval = alterdata.getVar(ekey, False)
|
renameVar(key, ekey, alterdata)
|
||||||
if newval is not None:
|
|
||||||
val = alterdata.getVar(key, False)
|
|
||||||
if val is not None:
|
|
||||||
bb.warn("Variable key %s (%s) replaces original key %s (%s)." % (key, val, ekey, newval))
|
|
||||||
alterdata.renameVar(key, ekey)
|
|
||||||
|
|
||||||
def inheritFromOS(d, savedenv, permitted):
|
def inheritFromOS(d, savedenv, permitted):
|
||||||
"""Inherit variables from the initial environment."""
|
"""Inherit variables from the initial environment."""
|
||||||
|
@ -121,60 +166,53 @@ def inheritFromOS(d, savedenv, permitted):
|
||||||
for s in savedenv.keys():
|
for s in savedenv.keys():
|
||||||
if s in permitted:
|
if s in permitted:
|
||||||
try:
|
try:
|
||||||
d.setVar(s, savedenv.getVar(s), op = 'from env')
|
setVar(s, getVar(s, savedenv, True), d)
|
||||||
if s in exportlist:
|
if s in exportlist:
|
||||||
d.setVarFlag(s, "export", True, op = 'auto env export')
|
setVarFlag(s, "export", True, d)
|
||||||
except TypeError:
|
except TypeError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def emit_var(var, o=sys.__stdout__, d = init(), all=False):
|
def emit_var(var, o=sys.__stdout__, d = init(), all=False):
|
||||||
"""Emit a variable to be sourced by a shell."""
|
"""Emit a variable to be sourced by a shell."""
|
||||||
func = d.getVarFlag(var, "func", False)
|
if getVarFlag(var, "python", d):
|
||||||
if d.getVarFlag(var, 'python', False) and func:
|
return 0
|
||||||
return False
|
|
||||||
|
|
||||||
export = d.getVarFlag(var, "export", False)
|
export = getVarFlag(var, "export", d)
|
||||||
unexport = d.getVarFlag(var, "unexport", False)
|
unexport = getVarFlag(var, "unexport", d)
|
||||||
|
func = getVarFlag(var, "func", d)
|
||||||
if not all and not export and not unexport and not func:
|
if not all and not export and not unexport and not func:
|
||||||
return False
|
return 0
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if all:
|
if all:
|
||||||
oval = d.getVar(var, False)
|
oval = getVar(var, d, 0)
|
||||||
val = d.getVar(var)
|
val = getVar(var, d, 1)
|
||||||
except (KeyboardInterrupt, bb.build.FuncFailed):
|
except (KeyboardInterrupt, bb.build.FuncFailed):
|
||||||
raise
|
raise
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
o.write('# expansion of %s threw %s: %s\n' % (var, exc.__class__.__name__, str(exc)))
|
o.write('# expansion of %s threw %s: %s\n' % (var, exc.__class__.__name__, str(exc)))
|
||||||
return False
|
return 0
|
||||||
|
|
||||||
if all:
|
if all:
|
||||||
d.varhistory.emit(var, oval, val, o, d)
|
commentVal = re.sub('\n', '\n#', str(oval))
|
||||||
|
o.write('# %s=%s\n' % (var, commentVal))
|
||||||
|
|
||||||
if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all:
|
if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all:
|
||||||
return False
|
return 0
|
||||||
|
|
||||||
varExpanded = d.expand(var)
|
varExpanded = expand(var, d)
|
||||||
|
|
||||||
if unexport:
|
if unexport:
|
||||||
o.write('unset %s\n' % varExpanded)
|
o.write('unset %s\n' % varExpanded)
|
||||||
return False
|
return 0
|
||||||
|
|
||||||
if val is None:
|
if not val:
|
||||||
return False
|
return 0
|
||||||
|
|
||||||
val = str(val)
|
val = str(val)
|
||||||
|
|
||||||
if varExpanded.startswith("BASH_FUNC_"):
|
|
||||||
varExpanded = varExpanded[10:-2]
|
|
||||||
val = val[3:] # Strip off "() "
|
|
||||||
o.write("%s() %s\n" % (varExpanded, val))
|
|
||||||
o.write("export -f %s\n" % (varExpanded))
|
|
||||||
return True
|
|
||||||
|
|
||||||
if func:
|
if func:
|
||||||
# NOTE: should probably check for unbalanced {} within the var
|
# NOTE: should probably check for unbalanced {} within the var
|
||||||
val = val.rstrip('\n')
|
|
||||||
o.write("%s() {\n%s\n}\n" % (varExpanded, val))
|
o.write("%s() {\n%s\n}\n" % (varExpanded, val))
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
|
@ -183,35 +221,32 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
|
||||||
|
|
||||||
# if we're going to output this within doublequotes,
|
# if we're going to output this within doublequotes,
|
||||||
# to a shell, we need to escape the quotes in the var
|
# to a shell, we need to escape the quotes in the var
|
||||||
alter = re.sub('"', '\\"', val)
|
alter = re.sub('"', '\\"', val.strip())
|
||||||
alter = re.sub('\n', ' \\\n', alter)
|
alter = re.sub('\n', ' \\\n', alter)
|
||||||
alter = re.sub('\\$', '\\\\$', alter)
|
|
||||||
o.write('%s="%s"\n' % (varExpanded, alter))
|
o.write('%s="%s"\n' % (varExpanded, alter))
|
||||||
return False
|
return 0
|
||||||
|
|
||||||
def emit_env(o=sys.__stdout__, d = init(), all=False):
|
def emit_env(o=sys.__stdout__, d = init(), all=False):
|
||||||
"""Emits all items in the data store in a format such that it can be sourced by a shell."""
|
"""Emits all items in the data store in a format such that it can be sourced by a shell."""
|
||||||
|
|
||||||
isfunc = lambda key: bool(d.getVarFlag(key, "func", False))
|
isfunc = lambda key: bool(d.getVarFlag(key, "func"))
|
||||||
keys = sorted((key for key in d.keys() if not key.startswith("__")), key=isfunc)
|
keys = sorted((key for key in d.keys() if not key.startswith("__")), key=isfunc)
|
||||||
grouped = groupby(keys, isfunc)
|
grouped = groupby(keys, isfunc)
|
||||||
for isfunc, keys in grouped:
|
for isfunc, keys in grouped:
|
||||||
for key in sorted(keys):
|
for key in keys:
|
||||||
emit_var(key, o, d, all and not isfunc) and o.write('\n')
|
emit_var(key, o, d, all and not isfunc) and o.write('\n')
|
||||||
|
|
||||||
def exported_keys(d):
|
def exported_keys(d):
|
||||||
return (key for key in d.keys() if not key.startswith('__') and
|
return (key for key in d.keys() if not key.startswith('__') and
|
||||||
d.getVarFlag(key, 'export', False) and
|
d.getVarFlag(key, 'export') and
|
||||||
not d.getVarFlag(key, 'unexport', False))
|
not d.getVarFlag(key, 'unexport'))
|
||||||
|
|
||||||
def exported_vars(d):
|
def exported_vars(d):
|
||||||
k = list(exported_keys(d))
|
for key in exported_keys(d):
|
||||||
for key in k:
|
|
||||||
try:
|
try:
|
||||||
value = d.getVar(key)
|
value = d.getVar(key, True)
|
||||||
except Exception as err:
|
except Exception:
|
||||||
bb.warn("%s: Unable to export ${%s}: %s" % (d.getVar("FILE"), key, err))
|
pass
|
||||||
continue
|
|
||||||
|
|
||||||
if value is not None:
|
if value is not None:
|
||||||
yield key, str(value)
|
yield key, str(value)
|
||||||
|
@ -219,150 +254,57 @@ def exported_vars(d):
|
||||||
def emit_func(func, o=sys.__stdout__, d = init()):
|
def emit_func(func, o=sys.__stdout__, d = init()):
|
||||||
"""Emits all items in the data store in a format such that it can be sourced by a shell."""
|
"""Emits all items in the data store in a format such that it can be sourced by a shell."""
|
||||||
|
|
||||||
keys = (key for key in d.keys() if not key.startswith("__") and not d.getVarFlag(key, "func", False))
|
keys = (key for key in d.keys() if not key.startswith("__") and not d.getVarFlag(key, "func"))
|
||||||
for key in sorted(keys):
|
for key in keys:
|
||||||
emit_var(key, o, d, False)
|
emit_var(key, o, d, False) and o.write('\n')
|
||||||
|
|
||||||
o.write('\n')
|
|
||||||
emit_var(func, o, d, False) and o.write('\n')
|
emit_var(func, o, d, False) and o.write('\n')
|
||||||
newdeps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func))
|
newdeps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func, True))
|
||||||
newdeps |= set((d.getVarFlag(func, "vardeps") or "").split())
|
|
||||||
seen = set()
|
seen = set()
|
||||||
while newdeps:
|
while newdeps:
|
||||||
deps = newdeps
|
deps = newdeps
|
||||||
seen |= deps
|
seen |= deps
|
||||||
newdeps = set()
|
newdeps = set()
|
||||||
for dep in deps:
|
for dep in deps:
|
||||||
if d.getVarFlag(dep, "func", False) and not d.getVarFlag(dep, "python", False):
|
if bb.data.getVarFlag(dep, "func", d):
|
||||||
emit_var(dep, o, d, False) and o.write('\n')
|
emit_var(dep, o, d, False) and o.write('\n')
|
||||||
newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep))
|
newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep, True))
|
||||||
newdeps |= set((d.getVarFlag(dep, "vardeps") or "").split())
|
|
||||||
newdeps -= seen
|
|
||||||
|
|
||||||
_functionfmt = """
|
|
||||||
def {function}(d):
|
|
||||||
{body}"""
|
|
||||||
|
|
||||||
def emit_func_python(func, o=sys.__stdout__, d = init()):
|
|
||||||
"""Emits all items in the data store in a format such that it can be sourced by a shell."""
|
|
||||||
|
|
||||||
def write_func(func, o, call = False):
|
|
||||||
body = d.getVar(func, False)
|
|
||||||
if not body.startswith("def"):
|
|
||||||
body = _functionfmt.format(function=func, body=body)
|
|
||||||
|
|
||||||
o.write(body.strip() + "\n\n")
|
|
||||||
if call:
|
|
||||||
o.write(func + "(d)" + "\n\n")
|
|
||||||
|
|
||||||
write_func(func, o, True)
|
|
||||||
pp = bb.codeparser.PythonParser(func, logger)
|
|
||||||
pp.parse_python(d.getVar(func, False))
|
|
||||||
newdeps = pp.execs
|
|
||||||
newdeps |= set((d.getVarFlag(func, "vardeps") or "").split())
|
|
||||||
seen = set()
|
|
||||||
while newdeps:
|
|
||||||
deps = newdeps
|
|
||||||
seen |= deps
|
|
||||||
newdeps = set()
|
|
||||||
for dep in deps:
|
|
||||||
if d.getVarFlag(dep, "func", False) and d.getVarFlag(dep, "python", False):
|
|
||||||
write_func(dep, o)
|
|
||||||
pp = bb.codeparser.PythonParser(dep, logger)
|
|
||||||
pp.parse_python(d.getVar(dep, False))
|
|
||||||
newdeps |= pp.execs
|
|
||||||
newdeps |= set((d.getVarFlag(dep, "vardeps") or "").split())
|
|
||||||
newdeps -= seen
|
newdeps -= seen
|
||||||
|
|
||||||
def update_data(d):
|
def update_data(d):
|
||||||
"""Performs final steps upon the datastore, including application of overrides"""
|
"""Performs final steps upon the datastore, including application of overrides"""
|
||||||
d.finalize(parent = True)
|
d.finalize()
|
||||||
|
|
||||||
def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
|
def build_dependencies(key, keys, shelldeps, vardepvals, d):
|
||||||
deps = set()
|
deps = set()
|
||||||
|
vardeps = d.getVarFlag(key, "vardeps", True)
|
||||||
try:
|
try:
|
||||||
if key[-1] == ']':
|
|
||||||
vf = key[:-1].split('[')
|
|
||||||
value = d.getVarFlag(vf[0], vf[1], False)
|
|
||||||
parser = d.expandWithRefs(value, key)
|
|
||||||
deps |= parser.references
|
|
||||||
deps = deps | (keys & parser.execs)
|
|
||||||
return deps, value
|
|
||||||
varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "exports", "postfuncs", "prefuncs", "lineno", "filename"]) or {}
|
|
||||||
vardeps = varflags.get("vardeps")
|
|
||||||
value = d.getVar(key, False)
|
value = d.getVar(key, False)
|
||||||
|
if key in vardepvals:
|
||||||
def handle_contains(value, contains, d):
|
value = d.getVarFlag(key, "vardepvalue", True)
|
||||||
newvalue = ""
|
elif d.getVarFlag(key, "func"):
|
||||||
for k in sorted(contains):
|
if d.getVarFlag(key, "python"):
|
||||||
l = (d.getVar(k) or "").split()
|
parsedvar = d.expandWithRefs(value, key)
|
||||||
for item in sorted(contains[k]):
|
|
||||||
for word in item.split():
|
|
||||||
if not word in l:
|
|
||||||
newvalue += "\n%s{%s} = Unset" % (k, item)
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
newvalue += "\n%s{%s} = Set" % (k, item)
|
|
||||||
if not newvalue:
|
|
||||||
return value
|
|
||||||
if not value:
|
|
||||||
return newvalue
|
|
||||||
return value + newvalue
|
|
||||||
|
|
||||||
if "vardepvalue" in varflags:
|
|
||||||
value = varflags.get("vardepvalue")
|
|
||||||
elif varflags.get("func"):
|
|
||||||
if varflags.get("python"):
|
|
||||||
parser = bb.codeparser.PythonParser(key, logger)
|
parser = bb.codeparser.PythonParser(key, logger)
|
||||||
if value and "\t" in value:
|
parser.parse_python(parsedvar.value)
|
||||||
logger.warning("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE")))
|
|
||||||
parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
|
|
||||||
deps = deps | parser.references
|
deps = deps | parser.references
|
||||||
deps = deps | (keys & parser.execs)
|
|
||||||
value = handle_contains(value, parser.contains, d)
|
|
||||||
else:
|
else:
|
||||||
parsedvar = d.expandWithRefs(value, key)
|
parsedvar = d.expandWithRefs(value, key)
|
||||||
parser = bb.codeparser.ShellParser(key, logger)
|
parser = bb.codeparser.ShellParser(key, logger)
|
||||||
parser.parse_shell(parsedvar.value)
|
parser.parse_shell(parsedvar.value)
|
||||||
deps = deps | shelldeps
|
deps = deps | shelldeps
|
||||||
deps = deps | parsedvar.references
|
|
||||||
deps = deps | (keys & parser.execs) | (keys & parsedvar.execs)
|
|
||||||
value = handle_contains(value, parsedvar.contains, d)
|
|
||||||
if vardeps is None:
|
if vardeps is None:
|
||||||
parser.log.flush()
|
parser.log.flush()
|
||||||
if "prefuncs" in varflags:
|
deps = deps | parsedvar.references
|
||||||
deps = deps | set(varflags["prefuncs"].split())
|
deps = deps | (keys & parser.execs) | (keys & parsedvar.execs)
|
||||||
if "postfuncs" in varflags:
|
|
||||||
deps = deps | set(varflags["postfuncs"].split())
|
|
||||||
if "exports" in varflags:
|
|
||||||
deps = deps | set(varflags["exports"].split())
|
|
||||||
else:
|
else:
|
||||||
parser = d.expandWithRefs(value, key)
|
parser = d.expandWithRefs(value, key)
|
||||||
deps |= parser.references
|
deps |= parser.references
|
||||||
deps = deps | (keys & parser.execs)
|
deps = deps | (keys & parser.execs)
|
||||||
value = handle_contains(value, parser.contains, d)
|
|
||||||
|
|
||||||
if "vardepvalueexclude" in varflags:
|
|
||||||
exclude = varflags.get("vardepvalueexclude")
|
|
||||||
for excl in exclude.split('|'):
|
|
||||||
if excl:
|
|
||||||
value = value.replace(excl, '')
|
|
||||||
|
|
||||||
# Add varflags, assuming an exclusion list is set
|
|
||||||
if varflagsexcl:
|
|
||||||
varfdeps = []
|
|
||||||
for f in varflags:
|
|
||||||
if f not in varflagsexcl:
|
|
||||||
varfdeps.append('%s[%s]' % (key, f))
|
|
||||||
if varfdeps:
|
|
||||||
deps |= set(varfdeps)
|
|
||||||
|
|
||||||
deps |= set((vardeps or "").split())
|
deps |= set((vardeps or "").split())
|
||||||
deps -= set(varflags.get("vardepsexclude", "").split())
|
deps -= set((d.getVarFlag(key, "vardepsexclude", True) or "").split())
|
||||||
except bb.parse.SkipRecipe:
|
except:
|
||||||
raise
|
bb.note("Error expanding variable %s" % key)
|
||||||
except Exception as e:
|
|
||||||
bb.warn("Exception during build_dependencies for %s" % key)
|
|
||||||
raise
|
raise
|
||||||
return deps, value
|
return deps, value
|
||||||
#bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))
|
#bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))
|
||||||
|
@ -370,16 +312,16 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
|
||||||
|
|
||||||
def generate_dependencies(d):
|
def generate_dependencies(d):
|
||||||
|
|
||||||
keys = set(key for key in d if not key.startswith("__"))
|
keys = set(key for key in d.keys() if not key.startswith("__"))
|
||||||
shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export", False) and not d.getVarFlag(key, "unexport", False))
|
shelldeps = set(key for key in keys if d.getVarFlag(key, "export") and not d.getVarFlag(key, "unexport"))
|
||||||
varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS')
|
vardepvals = set(key for key in keys if d.getVarFlag(key, "vardepvalue"))
|
||||||
|
|
||||||
deps = {}
|
deps = {}
|
||||||
values = {}
|
values = {}
|
||||||
|
|
||||||
tasklist = d.getVar('__BBTASKS', False) or []
|
tasklist = bb.data.getVar('__BBTASKS', d) or []
|
||||||
for task in tasklist:
|
for task in tasklist:
|
||||||
deps[task], values[task] = build_dependencies(task, keys, shelldeps, varflagsexcl, d)
|
deps[task], values[task] = build_dependencies(task, keys, shelldeps, vardepvals, d)
|
||||||
newdeps = deps[task]
|
newdeps = deps[task]
|
||||||
seen = set()
|
seen = set()
|
||||||
while newdeps:
|
while newdeps:
|
||||||
|
@ -388,16 +330,14 @@ def generate_dependencies(d):
|
||||||
newdeps = set()
|
newdeps = set()
|
||||||
for dep in nextdeps:
|
for dep in nextdeps:
|
||||||
if dep not in deps:
|
if dep not in deps:
|
||||||
deps[dep], values[dep] = build_dependencies(dep, keys, shelldeps, varflagsexcl, d)
|
deps[dep], values[dep] = build_dependencies(dep, keys, shelldeps, vardepvals, d)
|
||||||
newdeps |= deps[dep]
|
newdeps |= deps[dep]
|
||||||
newdeps -= seen
|
newdeps -= seen
|
||||||
#print "For %s: %s" % (task, str(deps[task]))
|
#print "For %s: %s" % (task, str(taskdeps[task]))
|
||||||
return tasklist, deps, values
|
return tasklist, deps, values
|
||||||
|
|
||||||
def inherits_class(klass, d):
|
def inherits_class(klass, d):
|
||||||
val = d.getVar('__inherit_cache', False) or []
|
val = getVar('__inherit_cache', d) or []
|
||||||
needle = os.path.join('classes', '%s.bbclass' % klass)
|
if os.path.join('classes', '%s.bbclass' % klass) in val:
|
||||||
for v in val:
|
return True
|
||||||
if v.endswith(needle):
|
|
||||||
return True
|
|
||||||
return False
|
return False
|
||||||
|
|
File diff suppressed because it is too large
@@ -24,21 +24,19 @@ BitBake build tools.

 import os, sys
 import warnings
-import pickle
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
 import logging
 import atexit
 import traceback
-import ast
-import threading

 import bb.utils
-import bb.compat
-import bb.exceptions

 # This is the pid for which we should generate the event. This is set when
 # the runqueue forks off.
 worker_pid = 0
-worker_fire = None
+worker_pipe = None

 logger = logging.getLogger('BitBake.Event')
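A hedged sketch of the handler registration and firing flow whose signatures change in this diff; the handler name and the datastore d are assumptions for the example.

    import bb.event

    def on_config_parsed(event):
        # called for every fired event that the registered mask lets through
        print("configuration parsed")

    # newer register() accepts an event mask; older releases took only (name, handler)
    bb.event.register("on_config_parsed", on_config_parsed, mask=["bb.event.ConfigParsed"])
    bb.event.fire(bb.event.ConfigParsed(), d)   # d: an existing datastore (assumed)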
||||||
|
|
||||||
|
@ -48,63 +46,26 @@ class Event(object):
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.pid = worker_pid
|
self.pid = worker_pid
|
||||||
|
|
||||||
|
NotHandled = 0
|
||||||
class HeartbeatEvent(Event):
|
Handled = 1
|
||||||
"""Triggered at regular time intervals of 10 seconds. Other events can fire much more often
|
|
||||||
(runQueueTaskStarted when there are many short tasks) or not at all for long periods
|
|
||||||
of time (again runQueueTaskStarted, when there is just one long-running task), so this
|
|
||||||
event is more suitable for doing some task-independent work occassionally."""
|
|
||||||
def __init__(self, time):
|
|
||||||
Event.__init__(self)
|
|
||||||
self.time = time
|
|
||||||
|
|
||||||
Registered = 10
|
Registered = 10
|
||||||
AlreadyRegistered = 14
|
AlreadyRegistered = 14
|
||||||
|
|
||||||
def get_class_handlers():
|
|
||||||
return _handlers
|
|
||||||
|
|
||||||
def set_class_handlers(h):
|
|
||||||
global _handlers
|
|
||||||
_handlers = h
|
|
||||||
|
|
||||||
def clean_class_handlers():
|
|
||||||
return bb.compat.OrderedDict()
|
|
||||||
|
|
||||||
# Internal
|
# Internal
|
||||||
_handlers = clean_class_handlers()
|
_handlers = {}
|
||||||
_ui_handlers = {}
|
_ui_handlers = {}
|
||||||
_ui_logfilters = {}
|
|
||||||
_ui_handler_seq = 0
|
_ui_handler_seq = 0
|
||||||
_event_handler_map = {}
|
|
||||||
_catchall_handlers = {}
|
|
||||||
_eventfilter = None
|
|
||||||
_uiready = False
|
|
||||||
_thread_lock = threading.Lock()
|
|
||||||
_thread_lock_enabled = False
|
|
||||||
|
|
||||||
if hasattr(__builtins__, '__setitem__'):
|
# For compatibility
|
||||||
builtins = __builtins__
|
bb.utils._context["NotHandled"] = NotHandled
|
||||||
else:
|
bb.utils._context["Handled"] = Handled
|
||||||
builtins = __builtins__.__dict__
|
|
||||||
|
|
||||||
def enable_threadlock():
|
|
||||||
global _thread_lock_enabled
|
|
||||||
_thread_lock_enabled = True
|
|
||||||
|
|
||||||
def disable_threadlock():
|
|
||||||
global _thread_lock_enabled
|
|
||||||
_thread_lock_enabled = False
|
|
||||||
|
|
||||||
def execute_handler(name, handler, event, d):
|
def execute_handler(name, handler, event, d):
|
||||||
event.data = d
|
event.data = d
|
||||||
addedd = False
|
|
||||||
if 'd' not in builtins:
|
|
||||||
builtins['d'] = d
|
|
||||||
addedd = True
|
|
||||||
try:
|
try:
|
||||||
ret = handler(event)
|
ret = handler(event)
|
||||||
except (bb.parse.SkipRecipe, bb.BBHandledException):
|
except bb.parse.SkipPackage:
|
||||||
raise
|
raise
|
||||||
except Exception:
|
except Exception:
|
||||||
etype, value, tb = sys.exc_info()
|
etype, value, tb = sys.exc_info()
|
||||||
|
@ -117,21 +78,20 @@ def execute_handler(name, handler, event, d):
|
||||||
raise
|
raise
|
||||||
finally:
|
finally:
|
||||||
del event.data
|
del event.data
|
||||||
if addedd:
|
|
||||||
del builtins['d']
|
if ret is not None:
|
||||||
|
warnings.warn("Using Handled/NotHandled in event handlers is deprecated",
|
||||||
|
DeprecationWarning, stacklevel = 2)
|
||||||
|
|
||||||
def fire_class_handlers(event, d):
|
def fire_class_handlers(event, d):
|
||||||
if isinstance(event, logging.LogRecord):
|
if isinstance(event, logging.LogRecord):
|
||||||
return
|
return
|
||||||
|
|
||||||
eid = str(event.__class__)[8:-2]
|
for name, handler in _handlers.iteritems():
|
||||||
evt_hmap = _event_handler_map.get(eid, {})
|
try:
|
||||||
for name, handler in list(_handlers.items()):
|
|
||||||
if name in _catchall_handlers or name in evt_hmap:
|
|
||||||
if _eventfilter:
|
|
||||||
if not _eventfilter(name, handler, event, d):
|
|
||||||
continue
|
|
||||||
execute_handler(name, handler, event, d)
|
execute_handler(name, handler, event, d)
|
||||||
|
except Exception:
|
||||||
|
continue
|
||||||
|
|
||||||
ui_queue = []
|
ui_queue = []
|
||||||
@atexit.register
|
@atexit.register
|
||||||
|
@ -139,52 +99,25 @@ def print_ui_queue():
|
||||||
"""If we're exiting before a UI has been spawned, display any queued
|
"""If we're exiting before a UI has been spawned, display any queued
|
||||||
LogRecords to the console."""
|
LogRecords to the console."""
|
||||||
logger = logging.getLogger("BitBake")
|
logger = logging.getLogger("BitBake")
|
||||||
if not _uiready:
|
if not _ui_handlers:
|
||||||
from bb.msg import BBLogFormatter
|
from bb.msg import BBLogFormatter
|
||||||
stdout = logging.StreamHandler(sys.stdout)
|
console = logging.StreamHandler(sys.stdout)
|
||||||
stderr = logging.StreamHandler(sys.stderr)
|
console.setFormatter(BBLogFormatter("%(levelname)s: %(message)s"))
|
||||||
formatter = BBLogFormatter("%(levelname)s: %(message)s")
|
logger.handlers = [console]
|
||||||
stdout.setFormatter(formatter)
|
for event in ui_queue:
|
||||||
stderr.setFormatter(formatter)
|
|
||||||
|
|
||||||
# First check to see if we have any proper messages
|
|
||||||
msgprint = False
|
|
||||||
for event in ui_queue[:]:
|
|
||||||
if isinstance(event, logging.LogRecord):
|
|
||||||
if event.levelno > logging.DEBUG:
|
|
||||||
if event.levelno >= logging.WARNING:
|
|
||||||
logger.addHandler(stderr)
|
|
||||||
else:
|
|
||||||
logger.addHandler(stdout)
|
|
||||||
logger.handle(event)
|
|
||||||
msgprint = True
|
|
||||||
if msgprint:
|
|
||||||
return
|
|
||||||
|
|
||||||
# Nope, so just print all of the messages we have (including debug messages)
|
|
||||||
logger.addHandler(stdout)
|
|
||||||
for event in ui_queue[:]:
|
|
||||||
if isinstance(event, logging.LogRecord):
|
if isinstance(event, logging.LogRecord):
|
||||||
logger.handle(event)
|
logger.handle(event)
|
||||||
|
|
||||||
def fire_ui_handlers(event, d):
|
def fire_ui_handlers(event, d):
|
||||||
global _thread_lock
|
if not _ui_handlers:
|
||||||
global _thread_lock_enabled
|
|
||||||
|
|
||||||
if not _uiready:
|
|
||||||
# No UI handlers registered yet, queue up the messages
|
# No UI handlers registered yet, queue up the messages
|
||||||
ui_queue.append(event)
|
ui_queue.append(event)
|
||||||
return
|
return
|
||||||
|
|
||||||
if _thread_lock_enabled:
|
|
||||||
_thread_lock.acquire()
|
|
||||||
|
|
||||||
errors = []
|
errors = []
|
||||||
for h in _ui_handlers:
|
for h in _ui_handlers:
|
||||||
#print "Sending event %s" % event
|
#print "Sending event %s" % event
|
||||||
try:
|
try:
|
||||||
if not _ui_logfilters[h].filter(event):
|
|
||||||
continue
|
|
||||||
# We use pickle here since it better handles object instances
|
# We use pickle here since it better handles object instances
|
||||||
# which xmlrpc's marshaller does not. Events *must* be serializable
|
# which xmlrpc's marshaller does not. Events *must* be serializable
|
||||||
# by pickle.
|
# by pickle.
|
||||||
|
@ -197,9 +130,6 @@ def fire_ui_handlers(event, d):
|
||||||
for h in errors:
|
for h in errors:
|
||||||
del _ui_handlers[h]
|
del _ui_handlers[h]
|
||||||
|
|
||||||
if _thread_lock_enabled:
|
|
||||||
_thread_lock.release()
|
|
||||||
|
|
||||||
def fire(event, d):
|
def fire(event, d):
|
||||||
"""Fire off an Event"""
|
"""Fire off an Event"""
|
||||||
|
|
||||||
|
@ -209,16 +139,24 @@ def fire(event, d):
|
||||||
# don't have a datastore so the datastore context isn't a problem.
|
# don't have a datastore so the datastore context isn't a problem.
|
||||||
|
|
||||||
fire_class_handlers(event, d)
|
fire_class_handlers(event, d)
|
||||||
if worker_fire:
|
if worker_pid != 0:
|
||||||
worker_fire(event, d)
|
worker_fire(event, d)
|
||||||
else:
|
else:
|
||||||
fire_ui_handlers(event, d)
|
fire_ui_handlers(event, d)
|
||||||
|
|
||||||
|
def worker_fire(event, d):
|
||||||
|
data = "<event>" + pickle.dumps(event) + "</event>"
|
||||||
|
worker_pipe.write(data)
|
||||||
|
|
||||||
def fire_from_worker(event, d):
|
def fire_from_worker(event, d):
|
||||||
|
if not event.startswith("<event>") or not event.endswith("</event>"):
|
||||||
|
print("Error, not an event %s" % event)
|
||||||
|
return
|
||||||
|
event = pickle.loads(event[7:-8])
|
||||||
fire_ui_handlers(event, d)
|
fire_ui_handlers(event, d)
|
||||||
|
|
||||||
noop = lambda _: None
|
noop = lambda _: None
|
||||||
def register(name, handler, mask=None, filename=None, lineno=None):
|
def register(name, handler):
|
||||||
"""Register an Event handler"""
|
"""Register an Event handler"""
|
||||||
|
|
||||||
# already registered
|
# already registered
|
||||||
|
@ -227,63 +165,31 @@ def register(name, handler, mask=None, filename=None, lineno=None):
|
||||||
|
|
||||||
if handler is not None:
|
if handler is not None:
|
||||||
# handle string containing python code
|
# handle string containing python code
|
||||||
if isinstance(handler, str):
|
if isinstance(handler, basestring):
|
||||||
tmp = "def %s(e):\n%s" % (name, handler)
|
tmp = "def %s(e):\n%s" % (name, handler)
|
||||||
try:
|
try:
|
||||||
code = bb.methodpool.compile_cache(tmp)
|
code = compile(tmp, "%s(e)" % name, "exec")
|
||||||
if not code:
|
|
||||||
if filename is None:
|
|
||||||
filename = "%s(e)" % name
|
|
||||||
code = compile(tmp, filename, "exec", ast.PyCF_ONLY_AST)
|
|
||||||
if lineno is not None:
|
|
||||||
ast.increment_lineno(code, lineno-1)
|
|
||||||
code = compile(code, filename, "exec")
|
|
||||||
bb.methodpool.compile_cache_add(tmp, code)
|
|
||||||
except SyntaxError:
|
except SyntaxError:
|
||||||
logger.error("Unable to register event handler '%s':\n%s", name,
|
logger.error("Unable to register event handler '%s':\n%s", name,
|
||||||
''.join(traceback.format_exc(limit=0)))
|
''.join(traceback.format_exc(limit=0)))
|
||||||
_handlers[name] = noop
|
_handlers[name] = noop
|
||||||
return
|
return
|
||||||
env = {}
|
env = {}
|
||||||
bb.utils.better_exec(code, env)
|
bb.utils.simple_exec(code, env)
|
||||||
func = bb.utils.better_eval(name, env)
|
func = bb.utils.better_eval(name, env)
|
||||||
_handlers[name] = func
|
_handlers[name] = func
|
||||||
else:
|
else:
|
||||||
_handlers[name] = handler
|
_handlers[name] = handler
|
||||||
|
|
||||||
if not mask or '*' in mask:
|
|
||||||
_catchall_handlers[name] = True
|
|
||||||
else:
|
|
||||||
for m in mask:
|
|
||||||
if _event_handler_map.get(m, None) is None:
|
|
||||||
_event_handler_map[m] = {}
|
|
||||||
_event_handler_map[m][name] = True
|
|
||||||
|
|
||||||
return Registered
|
return Registered
|
||||||
|
|
||||||
def remove(name, handler):
|
def remove(name, handler):
|
||||||
"""Remove an Event handler"""
|
"""Remove an Event handler"""
|
||||||
_handlers.pop(name)
|
_handlers.pop(name)
|
||||||
|
|
||||||
def get_handlers():
|
def register_UIHhandler(handler):
|
||||||
return _handlers
|
|
||||||
|
|
||||||
def set_handlers(handlers):
|
|
||||||
global _handlers
|
|
||||||
_handlers = handlers
|
|
||||||
|
|
||||||
def set_eventfilter(func):
|
|
||||||
global _eventfilter
|
|
||||||
_eventfilter = func
|
|
||||||
|
|
||||||
def register_UIHhandler(handler, mainui=False):
|
|
||||||
if mainui:
|
|
||||||
global _uiready
|
|
||||||
_uiready = True
|
|
||||||
bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
|
bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
|
||||||
_ui_handlers[_ui_handler_seq] = handler
|
_ui_handlers[_ui_handler_seq] = handler
|
||||||
level, debug_domains = bb.msg.constructLogOptions()
|
|
||||||
_ui_logfilters[_ui_handler_seq] = UIEventFilter(level, debug_domains)
|
|
||||||
return _ui_handler_seq
|
return _ui_handler_seq
|
||||||
|
|
||||||
def unregister_UIHhandler(handlerNum):
|
def unregister_UIHhandler(handlerNum):
|
||||||
|
@ -291,37 +197,6 @@ def unregister_UIHhandler(handlerNum):
|
||||||
del _ui_handlers[handlerNum]
|
del _ui_handlers[handlerNum]
|
||||||
return
|
return
|
||||||
|
|
||||||
# Class to allow filtering of events and specific filtering of LogRecords *before* we put them over the IPC
|
|
||||||
class UIEventFilter(object):
|
|
||||||
def __init__(self, level, debug_domains):
|
|
||||||
self.update(None, level, debug_domains)
|
|
||||||
|
|
||||||
def update(self, eventmask, level, debug_domains):
|
|
||||||
self.eventmask = eventmask
|
|
||||||
self.stdlevel = level
|
|
||||||
self.debug_domains = debug_domains
|
|
||||||
|
|
||||||
def filter(self, event):
|
|
||||||
if isinstance(event, logging.LogRecord):
|
|
||||||
if event.levelno >= self.stdlevel:
|
|
||||||
return True
|
|
||||||
if event.name in self.debug_domains and event.levelno >= self.debug_domains[event.name]:
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
eid = str(event.__class__)[8:-2]
|
|
||||||
if self.eventmask and eid not in self.eventmask:
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
def set_UIHmask(handlerNum, level, debug_domains, mask):
|
|
||||||
if not handlerNum in _ui_handlers:
|
|
||||||
return False
|
|
||||||
if '*' in mask:
|
|
||||||
_ui_logfilters[handlerNum].update(None, level, debug_domains)
|
|
||||||
else:
|
|
||||||
_ui_logfilters[handlerNum].update(mask, level, debug_domains)
|
|
||||||
return True
|
|
||||||
|
|
||||||
def getName(e):
|
def getName(e):
|
||||||
"""Returns the name of a class or class instance"""
|
"""Returns the name of a class or class instance"""
|
||||||
if getattr(e, "__name__", None) == None:
|
if getattr(e, "__name__", None) == None:
|
||||||
|
@ -329,27 +204,6 @@ def getName(e):
|
||||||
else:
|
else:
|
||||||
return e.__name__
|
return e.__name__
|
||||||
|
|
||||||
class OperationStarted(Event):
|
|
||||||
"""An operation has begun"""
|
|
||||||
def __init__(self, msg = "Operation Started"):
|
|
||||||
Event.__init__(self)
|
|
||||||
self.msg = msg
|
|
||||||
|
|
||||||
class OperationCompleted(Event):
|
|
||||||
"""An operation has completed"""
|
|
||||||
def __init__(self, total, msg = "Operation Completed"):
|
|
||||||
Event.__init__(self)
|
|
||||||
self.total = total
|
|
||||||
self.msg = msg
|
|
||||||
|
|
||||||
class OperationProgress(Event):
|
|
||||||
"""An operation is in progress"""
|
|
||||||
def __init__(self, current, total, msg = "Operation in Progress"):
|
|
||||||
Event.__init__(self)
|
|
||||||
self.current = current
|
|
||||||
self.total = total
|
|
||||||
self.msg = msg + ": %s/%s" % (current, total);
|
|
||||||
|
|
||||||
class ConfigParsed(Event):
|
class ConfigParsed(Event):
|
||||||
"""Configuration Parsing Complete"""
|
"""Configuration Parsing Complete"""
|
||||||
|
|
||||||
|
@ -361,17 +215,6 @@ class RecipeEvent(Event):
|
||||||
class RecipePreFinalise(RecipeEvent):
|
class RecipePreFinalise(RecipeEvent):
|
||||||
""" Recipe Parsing Complete but not yet finialised"""
|
""" Recipe Parsing Complete but not yet finialised"""
|
||||||
|
|
||||||
class RecipeTaskPreProcess(RecipeEvent):
|
|
||||||
"""
|
|
||||||
Recipe Tasks about to be finalised
|
|
||||||
The list of tasks should be final at this point and handlers
|
|
||||||
are only able to change interdependencies
|
|
||||||
"""
|
|
||||||
def __init__(self, fn, tasklist):
|
|
||||||
self.fn = fn
|
|
||||||
self.tasklist = tasklist
|
|
||||||
Event.__init__(self)
|
|
||||||
|
|
||||||
class RecipeParsed(RecipeEvent):
|
class RecipeParsed(RecipeEvent):
|
||||||
""" Recipe Parsing Complete """
|
""" Recipe Parsing Complete """
|
||||||
|
|
||||||
|
@ -393,7 +236,7 @@ class StampUpdate(Event):
|
||||||
targets = property(getTargets)
|
targets = property(getTargets)
|
||||||
|
|
||||||
class BuildBase(Event):
|
class BuildBase(Event):
|
||||||
"""Base class for bitbake build events"""
|
"""Base class for bbmake run events"""
|
||||||
|
|
||||||
def __init__(self, n, p, failures = 0):
|
def __init__(self, n, p, failures = 0):
|
||||||
self._name = n
|
self._name = n
|
||||||
|
@ -431,64 +274,27 @@ class BuildBase(Event):
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class BuildInit(BuildBase):
|
|
||||||
"""buildFile or buildTargets was invoked"""
|
|
||||||
def __init__(self, p=[]):
|
|
||||||
name = None
|
|
||||||
BuildBase.__init__(self, name, p)
|
|
||||||
|
|
||||||
class BuildStarted(BuildBase, OperationStarted):
|
|
||||||
"""Event when builds start"""
|
|
||||||
def __init__(self, n, p, failures = 0):
|
|
||||||
OperationStarted.__init__(self, "Building Started")
|
|
||||||
BuildBase.__init__(self, n, p, failures)
|
|
||||||
|
|
||||||
class BuildCompleted(BuildBase, OperationCompleted):
|
class BuildStarted(BuildBase):
|
||||||
"""Event when builds have completed"""
|
"""bbmake build run started"""
|
||||||
def __init__(self, total, n, p, failures=0, interrupted=0):
|
|
||||||
if not failures:
|
|
||||||
OperationCompleted.__init__(self, total, "Building Succeeded")
|
class BuildCompleted(BuildBase):
|
||||||
else:
|
"""bbmake build run completed"""
|
||||||
OperationCompleted.__init__(self, total, "Building Failed")
|
|
||||||
self._interrupted = interrupted
|
|
||||||
BuildBase.__init__(self, n, p, failures)
|
|
||||||
|
|
||||||
class DiskFull(Event):
|
|
||||||
"""Disk full case build aborted"""
|
|
||||||
def __init__(self, dev, type, freespace, mountpoint):
|
|
||||||
Event.__init__(self)
|
|
||||||
self._dev = dev
|
|
||||||
self._type = type
|
|
||||||
self._free = freespace
|
|
||||||
self._mountpoint = mountpoint
|
|
||||||
|
|
||||||
class DiskUsageSample:
|
|
||||||
def __init__(self, available_bytes, free_bytes, total_bytes):
|
|
||||||
# Number of bytes available to non-root processes.
|
|
||||||
self.available_bytes = available_bytes
|
|
||||||
# Number of bytes available to root processes.
|
|
||||||
self.free_bytes = free_bytes
|
|
||||||
# Total capacity of the volume.
|
|
||||||
self.total_bytes = total_bytes
|
|
||||||
|
|
||||||
class MonitorDiskEvent(Event):
|
|
||||||
"""If BB_DISKMON_DIRS is set, then this event gets triggered each time disk space is checked.
|
|
||||||
Provides information about devices that are getting monitored."""
|
|
||||||
def __init__(self, disk_usage):
|
|
||||||
Event.__init__(self)
|
|
||||||
# hash of device root path -> DiskUsageSample
|
|
||||||
self.disk_usage = disk_usage
|
|
||||||
|
|
||||||
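A sketch of a handler for MonitorDiskEvent, assuming disk_usage maps each monitored mount point to a DiskUsageSample as described above (the one-gigabyte threshold is an arbitrary example, not part of this module):

def warn_on_low_disk(event, min_free_bytes=1024 * 1024 * 1024):
    # event.disk_usage: mount point -> DiskUsageSample
    for mountpoint, sample in event.disk_usage.items():
        if sample.available_bytes < min_free_bytes:
            print("Low disk space on %s: %d of %d bytes available"
                  % (mountpoint, sample.available_bytes, sample.total_bytes))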
class NoProvider(Event):
|
class NoProvider(Event):
|
||||||
"""No Provider for an Event"""
|
"""No Provider for an Event"""
|
||||||
|
|
||||||
def __init__(self, item, runtime=False, dependees=None, reasons=None, close_matches=None):
|
def __init__(self, item, runtime=False, dependees=None, reasons=[]):
|
||||||
Event.__init__(self)
|
Event.__init__(self)
|
||||||
self._item = item
|
self._item = item
|
||||||
self._runtime = runtime
|
self._runtime = runtime
|
||||||
self._dependees = dependees
|
self._dependees = dependees
|
||||||
self._reasons = reasons
|
self._reasons = reasons
|
||||||
self._close_matches = close_matches
|
|
||||||
|
|
||||||
def getItem(self):
|
def getItem(self):
|
||||||
return self._item
|
return self._item
|
||||||
|
@@ -523,16 +329,17 @@ class MultipleProviders(Event):
|
||||||
"""
|
"""
|
||||||
return self._candidates
|
return self._candidates
|
||||||
|
|
||||||
class ParseStarted(OperationStarted):
|
class ParseStarted(Event):
|
||||||
"""Recipe parsing for the runqueue has begun"""
|
"""Recipe parsing for the runqueue has begun"""
|
||||||
def __init__(self, total):
|
def __init__(self, total):
|
||||||
OperationStarted.__init__(self, "Recipe parsing Started")
|
Event.__init__(self)
|
||||||
self.total = total
|
self.total = total
|
||||||
|
|
||||||
class ParseCompleted(OperationCompleted):
|
class ParseCompleted(Event):
|
||||||
"""Recipe parsing for the runqueue has completed"""
|
"""Recipe parsing for the runqueue has completed"""
|
||||||
|
|
||||||
def __init__(self, cached, parsed, skipped, masked, virtuals, errors, total):
|
def __init__(self, cached, parsed, skipped, masked, virtuals, errors, total):
|
||||||
OperationCompleted.__init__(self, total, "Recipe parsing Completed")
|
Event.__init__(self)
|
||||||
self.cached = cached
|
self.cached = cached
|
||||||
self.parsed = parsed
|
self.parsed = parsed
|
||||||
self.skipped = skipped
|
self.skipped = skipped
|
||||||
|
@ -540,44 +347,33 @@ class ParseCompleted(OperationCompleted):
|
||||||
self.masked = masked
|
self.masked = masked
|
||||||
self.errors = errors
|
self.errors = errors
|
||||||
self.sofar = cached + parsed
|
self.sofar = cached + parsed
|
||||||
|
|
||||||
class ParseProgress(OperationProgress):
|
|
||||||
"""Recipe parsing progress"""
|
|
||||||
def __init__(self, current, total):
|
|
||||||
OperationProgress.__init__(self, current, total, "Recipe parsing")
|
|
||||||
|
|
||||||
|
|
||||||
class CacheLoadStarted(OperationStarted):
|
|
||||||
"""Loading of the dependency cache has begun"""
|
|
||||||
def __init__(self, total):
|
|
||||||
OperationStarted.__init__(self, "Loading cache Started")
|
|
||||||
self.total = total
|
self.total = total
|
||||||
|
|
||||||
class CacheLoadProgress(OperationProgress):
|
class ParseProgress(Event):
|
||||||
"""Cache loading progress"""
|
"""Recipe parsing progress"""
|
||||||
def __init__(self, current, total):
|
|
||||||
OperationProgress.__init__(self, current, total, "Loading cache")
|
|
||||||
|
|
||||||
class CacheLoadCompleted(OperationCompleted):
|
def __init__(self, current):
|
||||||
|
self.current = current
|
||||||
|
|
||||||
|
class CacheLoadStarted(Event):
|
||||||
|
"""Loading of the dependency cache has begun"""
|
||||||
|
def __init__(self, total):
|
||||||
|
Event.__init__(self)
|
||||||
|
self.total = total
|
||||||
|
|
||||||
|
class CacheLoadProgress(Event):
|
||||||
|
"""Cache loading progress"""
|
||||||
|
def __init__(self, current):
|
||||||
|
Event.__init__(self)
|
||||||
|
self.current = current
|
||||||
|
|
||||||
|
class CacheLoadCompleted(Event):
|
||||||
"""Cache loading is complete"""
|
"""Cache loading is complete"""
|
||||||
def __init__(self, total, num_entries):
|
def __init__(self, total, num_entries):
|
||||||
OperationCompleted.__init__(self, total, "Loading cache Completed")
|
Event.__init__(self)
|
||||||
|
self.total = total
|
||||||
self.num_entries = num_entries
|
self.num_entries = num_entries
|
||||||
|
|
||||||
class TreeDataPreparationStarted(OperationStarted):
|
|
||||||
"""Tree data preparation started"""
|
|
||||||
def __init__(self):
|
|
||||||
OperationStarted.__init__(self, "Preparing tree data Started")
|
|
||||||
|
|
||||||
class TreeDataPreparationProgress(OperationProgress):
|
|
||||||
"""Tree data preparation is in progress"""
|
|
||||||
def __init__(self, current, total):
|
|
||||||
OperationProgress.__init__(self, current, total, "Preparing tree data")
|
|
||||||
|
|
||||||
class TreeDataPreparationCompleted(OperationCompleted):
|
|
||||||
"""Tree data preparation completed"""
|
|
||||||
def __init__(self, total):
|
|
||||||
OperationCompleted.__init__(self, total, "Preparing tree data Completed")
|
|
||||||
|
|
||||||
class DepTreeGenerated(Event):
|
class DepTreeGenerated(Event):
|
||||||
"""
|
"""
|
||||||
|
@@ -596,16 +392,6 @@ class TargetsTreeGenerated(Event):
|
||||||
Event.__init__(self)
|
Event.__init__(self)
|
||||||
self._model = model
|
self._model = model
|
||||||
|
|
||||||
class ReachableStamps(Event):
|
|
||||||
"""
|
|
||||||
An event listing all stamps reachable after parsing
|
|
||||||
which the metadata may use to clean up stale data
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, stamps):
|
|
||||||
Event.__init__(self)
|
|
||||||
self.stamps = stamps
|
|
||||||
|
|
||||||
class FilesMatchingFound(Event):
|
class FilesMatchingFound(Event):
|
||||||
"""
|
"""
|
||||||
Event when a list of files matching the supplied pattern has
|
Event when a list of files matching the supplied pattern has
|
||||||
|
@@ -616,14 +402,6 @@ class FilesMatchingFound(Event):
|
||||||
self._pattern = pattern
|
self._pattern = pattern
|
||||||
self._matches = matches
|
self._matches = matches
|
||||||
|
|
||||||
class CoreBaseFilesFound(Event):
|
|
||||||
"""
|
|
||||||
Event when a list of appropriate config files has been generated
|
|
||||||
"""
|
|
||||||
def __init__(self, paths):
|
|
||||||
Event.__init__(self)
|
|
||||||
self._paths = paths
|
|
||||||
|
|
||||||
class ConfigFilesFound(Event):
|
class ConfigFilesFound(Event):
|
||||||
"""
|
"""
|
||||||
Event when a list of appropriate config files has been generated
|
Event when a list of appropriate config files has been generated
|
||||||
|
@@ -666,15 +444,6 @@ class MsgFatal(MsgBase):
|
||||||
class MsgPlain(MsgBase):
|
class MsgPlain(MsgBase):
|
||||||
"""General output"""
|
"""General output"""
|
||||||
|
|
||||||
class LogExecTTY(Event):
|
|
||||||
"""Send event containing program to spawn on tty of the logger"""
|
|
||||||
def __init__(self, msg, prog, sleep_delay, retries):
|
|
||||||
Event.__init__(self)
|
|
||||||
self.msg = msg
|
|
||||||
self.prog = prog
|
|
||||||
self.sleep_delay = sleep_delay
|
|
||||||
self.retries = retries
|
|
||||||
|
|
||||||
class LogHandler(logging.Handler):
|
class LogHandler(logging.Handler):
|
||||||
"""Dispatch logging messages as bitbake events"""
|
"""Dispatch logging messages as bitbake events"""
|
||||||
|
|
||||||
|
@@ -683,104 +452,10 @@ class LogHandler(logging.Handler):
|
||||||
etype, value, tb = record.exc_info
|
etype, value, tb = record.exc_info
|
||||||
if hasattr(tb, 'tb_next'):
|
if hasattr(tb, 'tb_next'):
|
||||||
tb = list(bb.exceptions.extract_traceback(tb, context=3))
|
tb = list(bb.exceptions.extract_traceback(tb, context=3))
|
||||||
# Need to turn the value into something the logging system can pickle
|
|
||||||
record.bb_exc_info = (etype, value, tb)
|
record.bb_exc_info = (etype, value, tb)
|
||||||
record.bb_exc_formatted = bb.exceptions.format_exception(etype, value, tb, limit=5)
|
|
||||||
value = str(value)
|
|
||||||
record.exc_info = None
|
record.exc_info = None
|
||||||
fire(record, None)
|
fire(record, None)
|
||||||
|
|
||||||
def filter(self, record):
|
def filter(self, record):
|
||||||
record.taskpid = worker_pid
|
record.taskpid = worker_pid
|
||||||
return True
|
return True
|
||||||
|
|
||||||
class RequestPackageInfo(Event):
|
|
||||||
"""
|
|
||||||
Event to request package information
|
|
||||||
"""
|
|
||||||
|
|
||||||
class PackageInfo(Event):
|
|
||||||
"""
|
|
||||||
Package information for GUI
|
|
||||||
"""
|
|
||||||
def __init__(self, pkginfolist):
|
|
||||||
Event.__init__(self)
|
|
||||||
self._pkginfolist = pkginfolist
|
|
||||||
|
|
||||||
class MetadataEvent(Event):
|
|
||||||
"""
|
|
||||||
Generic event targeted at OE-Core classes
|
|
||||||
to report information during asynchronous execution
|
|
||||||
"""
|
|
||||||
def __init__(self, eventtype, eventdata):
|
|
||||||
Event.__init__(self)
|
|
||||||
self.type = eventtype
|
|
||||||
self._localdata = eventdata
|
|
||||||
|
|
||||||
class ProcessStarted(Event):
|
|
||||||
"""
|
|
||||||
Generic process started event (usually part of the initial startup)
|
|
||||||
where further progress events will be delivered
|
|
||||||
"""
|
|
||||||
def __init__(self, processname, total):
|
|
||||||
Event.__init__(self)
|
|
||||||
self.processname = processname
|
|
||||||
self.total = total
|
|
||||||
|
|
||||||
class ProcessProgress(Event):
|
|
||||||
"""
|
|
||||||
Generic process progress event (usually part of the initial startup)
|
|
||||||
"""
|
|
||||||
def __init__(self, processname, progress):
|
|
||||||
Event.__init__(self)
|
|
||||||
self.processname = processname
|
|
||||||
self.progress = progress
|
|
||||||
|
|
||||||
class ProcessFinished(Event):
|
|
||||||
"""
|
|
||||||
Generic process finished event (usually part of the initial startup)
|
|
||||||
"""
|
|
||||||
def __init__(self, processname):
|
|
||||||
Event.__init__(self)
|
|
||||||
self.processname = processname
|
|
||||||
|
|
||||||
class SanityCheck(Event):
|
|
||||||
"""
|
|
||||||
Event to run sanity checks, either raise errors or generate events as return status.
|
|
||||||
"""
|
|
||||||
def __init__(self, generateevents = True):
|
|
||||||
Event.__init__(self)
|
|
||||||
self.generateevents = generateevents
|
|
||||||
|
|
||||||
class SanityCheckPassed(Event):
|
|
||||||
"""
|
|
||||||
Event to indicate sanity check has passed
|
|
||||||
"""
|
|
||||||
|
|
||||||
class SanityCheckFailed(Event):
|
|
||||||
"""
|
|
||||||
Event to indicate sanity check has failed
|
|
||||||
"""
|
|
||||||
def __init__(self, msg, network_error=False):
|
|
||||||
Event.__init__(self)
|
|
||||||
self._msg = msg
|
|
||||||
self._network_error = network_error
|
|
||||||
|
|
||||||
class NetworkTest(Event):
|
|
||||||
"""
|
|
||||||
Event to run network connectivity tests, either raise errors or generate events as return status.
|
|
||||||
"""
|
|
||||||
def __init__(self, generateevents = True):
|
|
||||||
Event.__init__(self)
|
|
||||||
self.generateevents = generateevents
|
|
||||||
|
|
||||||
class NetworkTestPassed(Event):
|
|
||||||
"""
|
|
||||||
Event to indicate network test has passed
|
|
||||||
"""
|
|
||||||
|
|
||||||
class NetworkTestFailed(Event):
|
|
||||||
"""
|
|
||||||
Event to indicate network test has failed
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
|
@@ -1,4 +1,4 @@
|
||||||
|
from __future__ import absolute_import
|
||||||
import inspect
|
import inspect
|
||||||
import traceback
|
import traceback
|
||||||
import bb.namedtuple_with_abc
|
import bb.namedtuple_with_abc
|
||||||
|
@@ -32,14 +32,7 @@ class TracebackEntry(namedtuple.abc):
|
||||||
def _get_frame_args(frame):
|
def _get_frame_args(frame):
|
||||||
"""Get the formatted arguments and class (if available) for a frame"""
|
"""Get the formatted arguments and class (if available) for a frame"""
|
||||||
arginfo = inspect.getargvalues(frame)
|
arginfo = inspect.getargvalues(frame)
|
||||||
|
if not arginfo.args:
|
||||||
try:
|
|
||||||
if not arginfo.args:
|
|
||||||
return '', None
|
|
||||||
# There have been reports from the field of python 2.6 which doesn't
|
|
||||||
# return a namedtuple here but simply a tuple so fallback gracefully if
|
|
||||||
# args isn't present.
|
|
||||||
except AttributeError:
|
|
||||||
return '', None
|
return '', None
|
||||||
|
|
||||||
firstarg = arginfo.args[0]
|
firstarg = arginfo.args[0]
|
||||||
|
@@ -86,6 +79,6 @@ def format_exception(etype, value, tb, context=1, limit=None, formatter=None):
|
||||||
|
|
||||||
def to_string(exc):
|
def to_string(exc):
|
||||||
if isinstance(exc, SystemExit):
|
if isinstance(exc, SystemExit):
|
||||||
if not isinstance(exc.code, str):
|
if not isinstance(exc.code, basestring):
|
||||||
return 'Exited with "%d"' % exc.code
|
return 'Exited with "%d"' % exc.code
|
||||||
return str(exc)
|
return str(exc)
|
||||||
|
|
|
@@ -0,0 +1,832 @@
|
||||||
|
# ex:ts=4:sw=4:sts=4:et
|
||||||
|
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||||
|
"""
|
||||||
|
BitBake 'Fetch' implementations
|
||||||
|
|
||||||
|
Classes for obtaining upstream sources for the
|
||||||
|
BitBake build tools.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Copyright (C) 2003, 2004 Chris Larson
|
||||||
|
#
|
||||||
|
# This program is free software; you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License version 2 as
|
||||||
|
# published by the Free Software Foundation.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License along
|
||||||
|
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||||
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||||
|
#
|
||||||
|
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||||
|
|
||||||
|
from __future__ import absolute_import
|
||||||
|
from __future__ import print_function
|
||||||
|
import os, re
|
||||||
|
import logging
|
||||||
|
import bb
|
||||||
|
from bb import data
|
||||||
|
from bb import persist_data
|
||||||
|
from bb import utils
|
||||||
|
|
||||||
|
__version__ = "1"
|
||||||
|
|
||||||
|
logger = logging.getLogger("BitBake.Fetch")
|
||||||
|
|
||||||
|
class MalformedUrl(Exception):
|
||||||
|
"""Exception raised when encountering an invalid url"""
|
||||||
|
|
||||||
|
class FetchError(Exception):
|
||||||
|
"""Exception raised when a download fails"""
|
||||||
|
|
||||||
|
class NoMethodError(Exception):
|
||||||
|
"""Exception raised when there is no method to obtain a supplied url or set of urls"""
|
||||||
|
|
||||||
|
class MissingParameterError(Exception):
|
||||||
|
"""Exception raised when a fetch method is missing a critical parameter in the url"""
|
||||||
|
|
||||||
|
class ParameterError(Exception):
|
||||||
|
"""Exception raised when a url cannot be proccessed due to invalid parameters."""
|
||||||
|
|
||||||
|
class MD5SumError(Exception):
|
||||||
|
"""Exception raised when a MD5SUM of a file does not match the expected one"""
|
||||||
|
|
||||||
|
class InvalidSRCREV(Exception):
|
||||||
|
"""Exception raised when an invalid SRCREV is encountered"""
|
||||||
|
|
||||||
|
def decodeurl(url):
|
||||||
|
"""Decodes an URL into the tokens (scheme, network location, path,
|
||||||
|
user, password, parameters).
|
||||||
|
"""
|
||||||
|
|
||||||
|
m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
|
||||||
|
if not m:
|
||||||
|
raise MalformedUrl(url)
|
||||||
|
|
||||||
|
type = m.group('type')
|
||||||
|
location = m.group('location')
|
||||||
|
if not location:
|
||||||
|
raise MalformedUrl(url)
|
||||||
|
user = m.group('user')
|
||||||
|
parm = m.group('parm')
|
||||||
|
|
||||||
|
locidx = location.find('/')
|
||||||
|
if locidx != -1 and type.lower() != 'file':
|
||||||
|
host = location[:locidx]
|
||||||
|
path = location[locidx:]
|
||||||
|
else:
|
||||||
|
host = ""
|
||||||
|
path = location
|
||||||
|
if user:
|
||||||
|
m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
|
||||||
|
if m:
|
||||||
|
user = m.group('user')
|
||||||
|
pswd = m.group('pswd')
|
||||||
|
else:
|
||||||
|
user = ''
|
||||||
|
pswd = ''
|
||||||
|
|
||||||
|
p = {}
|
||||||
|
if parm:
|
||||||
|
for s in parm.split(';'):
|
||||||
|
s1, s2 = s.split('=')
|
||||||
|
p[s1] = s2
|
||||||
|
|
||||||
|
return (type, host, path, user, pswd, p)
|
||||||
|
|
||||||
|
def encodeurl(decoded):
|
||||||
|
"""Encodes a URL from tokens (scheme, network location, path,
|
||||||
|
user, password, parameters).
|
||||||
|
"""
|
||||||
|
|
||||||
|
(type, host, path, user, pswd, p) = decoded
|
||||||
|
|
||||||
|
if not type or not path:
|
||||||
|
raise MissingParameterError("Type or path url components missing when encoding %s" % decoded)
|
||||||
|
url = '%s://' % type
|
||||||
|
if user:
|
||||||
|
url += "%s" % user
|
||||||
|
if pswd:
|
||||||
|
url += ":%s" % pswd
|
||||||
|
url += "@"
|
||||||
|
if host:
|
||||||
|
url += "%s" % host
|
||||||
|
url += "%s" % path
|
||||||
|
if p:
|
||||||
|
for parm in p:
|
||||||
|
url += ";%s=%s" % (parm, p[parm])
|
||||||
|
|
||||||
|
return url
|
||||||
|
|
||||||
|
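A round-trip sketch for the decodeurl()/encodeurl() helpers above; the URL is invented, and note that parameter order in the re-encoded URL depends on dict iteration order:

example = "git://git.example.com/project.git;protocol=http;branch=master"
(scheme, host, path, user, pswd, parm) = decodeurl(example)
# scheme == 'git', host == 'git.example.com', path == '/project.git'
# parm == {'protocol': 'http', 'branch': 'master'}
rebuilt = encodeurl((scheme, host, path, user, pswd, parm))
# rebuilt is equivalent to 'example', modulo parameter ordering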
def uri_replace(uri, uri_find, uri_replace, d):
|
||||||
|
if not uri or not uri_find or not uri_replace:
|
||||||
|
logger.debug(1, "uri_replace: passed an undefined value, not replacing")
|
||||||
|
uri_decoded = list(decodeurl(uri))
|
||||||
|
uri_find_decoded = list(decodeurl(uri_find))
|
||||||
|
uri_replace_decoded = list(decodeurl(uri_replace))
|
||||||
|
result_decoded = ['', '', '', '', '', {}]
|
||||||
|
for i in uri_find_decoded:
|
||||||
|
loc = uri_find_decoded.index(i)
|
||||||
|
result_decoded[loc] = uri_decoded[loc]
|
||||||
|
if isinstance(i, basestring):
|
||||||
|
if (re.match(i, uri_decoded[loc])):
|
||||||
|
result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
|
||||||
|
if uri_find_decoded.index(i) == 2:
|
||||||
|
if d:
|
||||||
|
localfn = bb.fetch.localpath(uri, d)
|
||||||
|
if localfn:
|
||||||
|
result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(bb.fetch.localpath(uri, d)))
|
||||||
|
else:
|
||||||
|
return uri
|
||||||
|
return encodeurl(result_decoded)
|
||||||
|
|
||||||
|
methods = []
|
||||||
|
urldata_cache = {}
|
||||||
|
saved_headrevs = {}
|
||||||
|
|
||||||
|
def fetcher_init(d):
|
||||||
|
"""
|
||||||
|
Called to initialize the fetchers once the configuration data is known.
|
||||||
|
Calls before this must not hit the cache.
|
||||||
|
"""
|
||||||
|
# When to drop SCM head revisions controlled by user policy
|
||||||
|
srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
|
||||||
|
if srcrev_policy == "cache":
|
||||||
|
logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
|
||||||
|
elif srcrev_policy == "clear":
|
||||||
|
logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
|
||||||
|
revs = persist_data.persist('BB_URI_HEADREVS', d)
|
||||||
|
try:
|
||||||
|
bb.fetch.saved_headrevs = revs.items()
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
revs.clear()
|
||||||
|
else:
|
||||||
|
raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
|
||||||
|
|
||||||
|
for m in methods:
|
||||||
|
if hasattr(m, "init"):
|
||||||
|
m.init(d)
|
||||||
|
|
||||||
|
def fetcher_compare_revisions(d):
|
||||||
|
"""
|
||||||
|
Compare the revisions in the persistent cache with current values and
|
||||||
|
return true/false on whether they've changed.
|
||||||
|
"""
|
||||||
|
|
||||||
|
data = persist_data.persist('BB_URI_HEADREVS', d).items()
|
||||||
|
data2 = bb.fetch.saved_headrevs
|
||||||
|
|
||||||
|
changed = False
|
||||||
|
for key in data:
|
||||||
|
if key not in data2 or data2[key] != data[key]:
|
||||||
|
logger.debug(1, "%s changed", key)
|
||||||
|
changed = True
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
logger.debug(2, "%s did not change", key)
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Function call order is usually:
|
||||||
|
# 1. init
|
||||||
|
# 2. go
|
||||||
|
# 3. localpaths
|
||||||
|
# localpath can be called at any time
|
||||||
|
|
||||||
|
def init(urls, d, setup = True):
|
||||||
|
urldata = {}
|
||||||
|
|
||||||
|
fn = bb.data.getVar('FILE', d, 1)
|
||||||
|
if fn in urldata_cache:
|
||||||
|
urldata = urldata_cache[fn]
|
||||||
|
|
||||||
|
for url in urls:
|
||||||
|
if url not in urldata:
|
||||||
|
urldata[url] = FetchData(url, d)
|
||||||
|
|
||||||
|
if setup:
|
||||||
|
for url in urldata:
|
||||||
|
if not urldata[url].setup:
|
||||||
|
urldata[url].setup_localpath(d)
|
||||||
|
|
||||||
|
urldata_cache[fn] = urldata
|
||||||
|
return urldata
|
||||||
|
|
||||||
|
def mirror_from_string(data):
|
||||||
|
return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
|
||||||
|
|
||||||
|
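mirror_from_string() above turns the PREMIRRORS/MIRRORS text (entries separated by literal "\n" sequences) into (find, replace) pairs; a small illustration with invented mirror entries:

premirrors = ("git://.*/.* http://mirror.example.com/sources/ \\n"
              "ftp://.*/.* http://mirror.example.com/sources/")
pairs = mirror_from_string(premirrors)
# pairs == [['git://.*/.*', 'http://mirror.example.com/sources/'],
#           ['ftp://.*/.*', 'http://mirror.example.com/sources/']]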
def verify_checksum(u, ud, d):
|
||||||
|
"""
|
||||||
|
verify the MD5 and SHA256 checksum for downloaded src
|
||||||
|
|
||||||
|
return value:
|
||||||
|
- True: checksum matched
|
||||||
|
- False: checksum unmatched
|
||||||
|
|
||||||
|
if the checksum is missing in the recipe file, "BB_STRICT_CHECKSUM" decides the return value.
|
||||||
|
if BB_STRICT_CHECKSUM = "1" then return false as unmatched, otherwise return true as
|
||||||
|
matched
|
||||||
|
"""
|
||||||
|
|
||||||
|
if not ud.type in ["http", "https", "ftp", "ftps"]:
|
||||||
|
return
|
||||||
|
|
||||||
|
md5data = bb.utils.md5_file(ud.localpath)
|
||||||
|
sha256data = bb.utils.sha256_file(ud.localpath)
|
||||||
|
|
||||||
|
if (ud.md5_expected == None or ud.sha256_expected == None):
|
||||||
|
logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n'
|
||||||
|
'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"',
|
||||||
|
ud.localpath, ud.md5_name, md5data,
|
||||||
|
ud.sha256_name, sha256data)
|
||||||
|
if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1":
|
||||||
|
raise FetchError("No checksum specified for %s." % u)
|
||||||
|
return
|
||||||
|
|
||||||
|
if (ud.md5_expected != md5data or ud.sha256_expected != sha256data):
|
||||||
|
logger.error('The checksums for "%s" did not match.\n'
|
||||||
|
' MD5: expected "%s", got "%s"\n'
|
||||||
|
' SHA256: expected "%s", got "%s"\n',
|
||||||
|
ud.localpath, ud.md5_expected, md5data,
|
||||||
|
ud.sha256_expected, sha256data)
|
||||||
|
raise FetchError("%s checksum mismatch." % u)
|
||||||
|
|
||||||
|
def go(d, urls = None):
|
||||||
|
"""
|
||||||
|
Fetch all urls
|
||||||
|
init must have previously been called
|
||||||
|
"""
|
||||||
|
if not urls:
|
||||||
|
urls = d.getVar("SRC_URI", 1).split()
|
||||||
|
urldata = init(urls, d, True)
|
||||||
|
|
||||||
|
for u in urls:
|
||||||
|
ud = urldata[u]
|
||||||
|
m = ud.method
|
||||||
|
localpath = ""
|
||||||
|
|
||||||
|
if not ud.localfile:
|
||||||
|
continue
|
||||||
|
|
||||||
|
lf = bb.utils.lockfile(ud.lockfile)
|
||||||
|
|
||||||
|
if m.try_premirror(u, ud, d):
|
||||||
|
# First try fetching uri, u, from PREMIRRORS
|
||||||
|
mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
|
||||||
|
localpath = try_mirrors(d, u, mirrors, False, m.forcefetch(u, ud, d))
|
||||||
|
elif os.path.exists(ud.localfile):
|
||||||
|
localpath = ud.localfile
|
||||||
|
|
||||||
|
# Need to re-test forcefetch() which will return true if our copy is too old
|
||||||
|
if m.forcefetch(u, ud, d) or not localpath:
|
||||||
|
# Next try fetching from the original uri, u
|
||||||
|
try:
|
||||||
|
m.go(u, ud, d)
|
||||||
|
localpath = ud.localpath
|
||||||
|
except FetchError:
|
||||||
|
# Remove any incomplete file
|
||||||
|
bb.utils.remove(ud.localpath)
|
||||||
|
# Finally, try fetching uri, u, from MIRRORS
|
||||||
|
mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
|
||||||
|
localpath = try_mirrors (d, u, mirrors)
|
||||||
|
if not localpath or not os.path.exists(localpath):
|
||||||
|
raise FetchError("Unable to fetch URL %s from any source." % u)
|
||||||
|
|
||||||
|
ud.localpath = localpath
|
||||||
|
|
||||||
|
if os.path.exists(ud.md5):
|
||||||
|
# Touch the md5 file to show active use of the download
|
||||||
|
try:
|
||||||
|
os.utime(ud.md5, None)
|
||||||
|
except:
|
||||||
|
# Errors aren't fatal here
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
# Only check the checksums if we've not seen this item before
|
||||||
|
verify_checksum(u, ud, d)
|
||||||
|
Fetch.write_md5sum(u, ud, d)
|
||||||
|
|
||||||
|
bb.utils.unlockfile(lf)
|
||||||
|
|
||||||
|
def checkstatus(d, urls = None):
|
||||||
|
"""
|
||||||
|
Check all urls exist upstream
|
||||||
|
init must have previously been called
|
||||||
|
"""
|
||||||
|
urldata = init([], d, True)
|
||||||
|
|
||||||
|
if not urls:
|
||||||
|
urls = urldata
|
||||||
|
|
||||||
|
for u in urls:
|
||||||
|
ud = urldata[u]
|
||||||
|
m = ud.method
|
||||||
|
logger.debug(1, "Testing URL %s", u)
|
||||||
|
# First try checking uri, u, from PREMIRRORS
|
||||||
|
mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
|
||||||
|
ret = try_mirrors(d, u, mirrors, True)
|
||||||
|
if not ret:
|
||||||
|
# Next try checking from the original uri, u
|
||||||
|
try:
|
||||||
|
ret = m.checkstatus(u, ud, d)
|
||||||
|
except:
|
||||||
|
# Finally, try checking uri, u, from MIRRORS
|
||||||
|
mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
|
||||||
|
ret = try_mirrors (d, u, mirrors, True)
|
||||||
|
|
||||||
|
if not ret:
|
||||||
|
raise FetchError("URL %s doesn't work" % u)
|
||||||
|
|
||||||
|
def localpaths(d):
|
||||||
|
"""
|
||||||
|
Return a list of the local filenames, assuming successful fetch
|
||||||
|
"""
|
||||||
|
local = []
|
||||||
|
urldata = init([], d, True)
|
||||||
|
|
||||||
|
for u in urldata:
|
||||||
|
ud = urldata[u]
|
||||||
|
local.append(ud.localpath)
|
||||||
|
|
||||||
|
return local
|
||||||
|
|
||||||
|
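Taken together, the module-level entry points above are normally driven in the order described by the earlier comment block (init, go, localpaths); a hedged sketch of a caller, where d is assumed to be a populated BitBake datastore:

src_uri = bb.data.getVar('SRC_URI', d, 1).split()
urldata = init(src_uri, d)        # parse the URLs into FetchData objects
go(d, src_uri)                    # download via PREMIRRORS, the URL itself, then MIRRORS
local_files = localpaths(d)       # resulting files, normally under DL_DIR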
srcrev_internal_call = False
|
||||||
|
|
||||||
|
def get_autorev(d):
|
||||||
|
return get_srcrev(d)
|
||||||
|
|
||||||
|
def get_srcrev(d):
|
||||||
|
"""
|
||||||
|
Return the version string for the current package
|
||||||
|
(usually to be used as PV)
|
||||||
|
Most packages usually only have one SCM so we just pass on the call.
|
||||||
|
In the multi SCM case, we build a value based on SRCREV_FORMAT which must
|
||||||
|
have been set.
|
||||||
|
"""
|
||||||
|
|
||||||
|
#
|
||||||
|
# Ugly code alert. localpath in the fetchers will try to evaluate SRCREV which
|
||||||
|
# could translate into a call to here. If it does, we need to catch this
|
||||||
|
# and provide some way so it knows get_srcrev is active instead of being
|
||||||
|
# some number etc. hence the srcrev_internal_call tracking and the magic
|
||||||
|
# "SRCREVINACTION" return value.
|
||||||
|
#
|
||||||
|
# Neater solutions welcome!
|
||||||
|
#
|
||||||
|
if bb.fetch.srcrev_internal_call:
|
||||||
|
return "SRCREVINACTION"
|
||||||
|
|
||||||
|
scms = []
|
||||||
|
|
||||||
|
# Only call setup_localpath on URIs which supports_srcrev()
|
||||||
|
urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
|
||||||
|
for u in urldata:
|
||||||
|
ud = urldata[u]
|
||||||
|
if ud.method.supports_srcrev():
|
||||||
|
if not ud.setup:
|
||||||
|
ud.setup_localpath(d)
|
||||||
|
scms.append(u)
|
||||||
|
|
||||||
|
if len(scms) == 0:
|
||||||
|
logger.error("SRCREV was used yet no valid SCM was found in SRC_URI")
|
||||||
|
raise ParameterError
|
||||||
|
|
||||||
|
if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
|
||||||
|
bb.data.setVar('__BB_DONT_CACHE', '1', d)
|
||||||
|
|
||||||
|
if len(scms) == 1:
|
||||||
|
return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)
|
||||||
|
|
||||||
|
#
|
||||||
|
# Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
|
||||||
|
#
|
||||||
|
format = bb.data.getVar('SRCREV_FORMAT', d, 1)
|
||||||
|
if not format:
|
||||||
|
logger.error("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
|
||||||
|
raise ParameterError
|
||||||
|
|
||||||
|
for scm in scms:
|
||||||
|
if 'name' in urldata[scm].parm:
|
||||||
|
name = urldata[scm].parm["name"]
|
||||||
|
rev = urldata[scm].method.sortable_revision(scm, urldata[scm], d)
|
||||||
|
format = format.replace(name, rev)
|
||||||
|
|
||||||
|
return format
|
||||||
|
|
||||||
|
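For the multi-SCM case handled at the end of get_srcrev() above, each name given in the SRC_URI entries is replaced in SRCREV_FORMAT by that SCM's sortable revision; a toy illustration of the substitution loop (names and revisions invented):

fmt = "machine_meta"
revisions = {"machine": "120+abc123", "meta": "88+def456"}
for name in revisions:
    fmt = fmt.replace(name, revisions[name])
# fmt == "120+abc123_88+def456"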
def localpath(url, d, cache = True):
|
||||||
|
"""
|
||||||
|
Called from the parser with cache=False since the cache isn't ready
|
||||||
|
at this point. Also called from classes in OE, e.g. patch.bbclass
|
||||||
|
"""
|
||||||
|
ud = init([url], d)
|
||||||
|
if ud[url].method:
|
||||||
|
return ud[url].localpath
|
||||||
|
return url
|
||||||
|
|
||||||
|
def runfetchcmd(cmd, d, quiet = False):
|
||||||
|
"""
|
||||||
|
Run cmd returning the command output
|
||||||
|
Raise an error if interrupted or cmd fails
|
||||||
|
Optionally echo command output to stdout
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Need to export PATH as binary could be in metadata paths
|
||||||
|
# rather than host provided
|
||||||
|
# Also include some other variables.
|
||||||
|
# FIXME: Should really include all export variables?
|
||||||
|
exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST',
|
||||||
|
'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy',
|
||||||
|
'https_proxy', 'no_proxy', 'ALL_PROXY', 'all_proxy',
|
||||||
|
'KRB5CCNAME', 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME']
|
||||||
|
|
||||||
|
for var in exportvars:
|
||||||
|
val = data.getVar(var, d, True)
|
||||||
|
if val:
|
||||||
|
cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
|
||||||
|
|
||||||
|
logger.debug(1, "Running %s", cmd)
|
||||||
|
|
||||||
|
# redirect stderr to stdout
|
||||||
|
stdout_handle = os.popen(cmd + " 2>&1", "r")
|
||||||
|
output = ""
|
||||||
|
|
||||||
|
while True:
|
||||||
|
line = stdout_handle.readline()
|
||||||
|
if not line:
|
||||||
|
break
|
||||||
|
if not quiet:
|
||||||
|
print(line, end=' ')
|
||||||
|
output += line
|
||||||
|
|
||||||
|
status = stdout_handle.close() or 0
|
||||||
|
signal = status >> 8
|
||||||
|
exitstatus = status & 0xff
|
||||||
|
|
||||||
|
if signal:
|
||||||
|
raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output))
|
||||||
|
elif status != 0:
|
||||||
|
raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, status, output))
|
||||||
|
|
||||||
|
return output
|
||||||
|
|
||||||
|
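runfetchcmd() above is what the individual fetchers use to shell out; a minimal call sketch (the command is arbitrary and d is assumed to be a datastore carrying PATH and proxy settings):

output = runfetchcmd("git ls-remote git://git.example.com/project.git", d, quiet=True)
# output holds the combined stdout/stderr; FetchError is raised on failure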
def try_mirrors(d, uri, mirrors, check = False, force = False):
|
||||||
|
"""
|
||||||
|
Try to use a mirrored version of the sources.
|
||||||
|
This method will be automatically called before the fetchers go.
|
||||||
|
|
||||||
|
d Is a bb.data instance
|
||||||
|
uri is the original uri we're trying to download
|
||||||
|
mirrors is the list of mirrors we're going to try
|
||||||
|
"""
|
||||||
|
fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri))
|
||||||
|
if not check and os.access(fpath, os.R_OK) and not force:
|
||||||
|
logger.debug(1, "%s already exists, skipping checkout.", fpath)
|
||||||
|
return fpath
|
||||||
|
|
||||||
|
ld = d.createCopy()
|
||||||
|
for (find, replace) in mirrors:
|
||||||
|
newuri = uri_replace(uri, find, replace, ld)
|
||||||
|
if newuri != uri:
|
||||||
|
try:
|
||||||
|
ud = FetchData(newuri, ld)
|
||||||
|
except bb.fetch.NoMethodError:
|
||||||
|
logger.debug(1, "No method for %s", uri)
|
||||||
|
continue
|
||||||
|
|
||||||
|
ud.setup_localpath(ld)
|
||||||
|
|
||||||
|
try:
|
||||||
|
if check:
|
||||||
|
found = ud.method.checkstatus(newuri, ud, ld)
|
||||||
|
if found:
|
||||||
|
return found
|
||||||
|
else:
|
||||||
|
ud.method.go(newuri, ud, ld)
|
||||||
|
return ud.localpath
|
||||||
|
except (bb.fetch.MissingParameterError,
|
||||||
|
bb.fetch.FetchError,
|
||||||
|
bb.fetch.MD5SumError):
|
||||||
|
import sys
|
||||||
|
(type, value, traceback) = sys.exc_info()
|
||||||
|
logger.debug(2, "Mirror fetch failure: %s", value)
|
||||||
|
bb.utils.remove(ud.localpath)
|
||||||
|
continue
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
class FetchData(object):
|
||||||
|
"""
|
||||||
|
A class which represents the fetcher state for a given URI.
|
||||||
|
"""
|
||||||
|
def __init__(self, url, d):
|
||||||
|
self.localfile = ""
|
||||||
|
(self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
|
||||||
|
self.date = Fetch.getSRCDate(self, d)
|
||||||
|
self.url = url
|
||||||
|
if not self.user and "user" in self.parm:
|
||||||
|
self.user = self.parm["user"]
|
||||||
|
if not self.pswd and "pswd" in self.parm:
|
||||||
|
self.pswd = self.parm["pswd"]
|
||||||
|
self.setup = False
|
||||||
|
|
||||||
|
if "name" in self.parm:
|
||||||
|
self.md5_name = "%s.md5sum" % self.parm["name"]
|
||||||
|
self.sha256_name = "%s.sha256sum" % self.parm["name"]
|
||||||
|
else:
|
||||||
|
self.md5_name = "md5sum"
|
||||||
|
self.sha256_name = "sha256sum"
|
||||||
|
self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
|
||||||
|
self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)
|
||||||
|
|
||||||
|
for m in methods:
|
||||||
|
if m.supports(url, self, d):
|
||||||
|
self.method = m
|
||||||
|
return
|
||||||
|
raise NoMethodError("Missing implementation for url %s" % url)
|
||||||
|
|
||||||
|
def setup_localpath(self, d):
|
||||||
|
self.setup = True
|
||||||
|
if "localpath" in self.parm:
|
||||||
|
# if user sets localpath for file, use it instead.
|
||||||
|
self.localpath = self.parm["localpath"]
|
||||||
|
self.basename = os.path.basename(self.localpath)
|
||||||
|
else:
|
||||||
|
premirrors = bb.data.getVar('PREMIRRORS', d, True)
|
||||||
|
local = ""
|
||||||
|
if premirrors and self.url:
|
||||||
|
aurl = self.url.split(";")[0]
|
||||||
|
mirrors = mirror_from_string(premirrors)
|
||||||
|
for (find, replace) in mirrors:
|
||||||
|
if replace.startswith("file://"):
|
||||||
|
path = aurl.split("://")[1]
|
||||||
|
path = path.split(";")[0]
|
||||||
|
local = replace.split("://")[1] + os.path.basename(path)
|
||||||
|
if local == aurl or not os.path.exists(local) or os.path.isdir(local):
|
||||||
|
local = ""
|
||||||
|
self.localpath = local
|
||||||
|
if not local:
|
||||||
|
try:
|
||||||
|
bb.fetch.srcrev_internal_call = True
|
||||||
|
self.localpath = self.method.localpath(self.url, self, d)
|
||||||
|
finally:
|
||||||
|
bb.fetch.srcrev_internal_call = False
|
||||||
|
# We have to clear data's internal caches since the cached value of SRCREV is now wrong.
|
||||||
|
# Horrible...
|
||||||
|
bb.data.delVar("ISHOULDNEVEREXIST", d)
|
||||||
|
|
||||||
|
if self.localpath is not None:
|
||||||
|
# Note: These files should always be in DL_DIR whereas localpath may not be.
|
||||||
|
basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath), d)
|
||||||
|
self.md5 = basepath + '.md5'
|
||||||
|
self.lockfile = basepath + '.lock'
|
||||||
|
|
||||||
|
|
||||||
|
class Fetch(object):
|
||||||
|
"""Base class for 'fetch'ing data"""
|
||||||
|
|
||||||
|
def __init__(self, urls = []):
|
||||||
|
self.urls = []
|
||||||
|
|
||||||
|
def supports(self, url, urldata, d):
|
||||||
|
"""
|
||||||
|
Check to see if this fetch class supports a given url.
|
||||||
|
"""
|
||||||
|
return 0
|
||||||
|
|
||||||
|
def localpath(self, url, urldata, d):
|
||||||
|
"""
|
||||||
|
Return the local filename of a given url assuming a successful fetch.
|
||||||
|
Can also setup variables in urldata for use in go (saving code duplication
|
||||||
|
and duplicate code execution)
|
||||||
|
"""
|
||||||
|
return url
|
||||||
|
def _strip_leading_slashes(self, relpath):
|
||||||
|
"""
|
||||||
|
Remove leading slash as os.path.join can't cope
|
||||||
|
"""
|
||||||
|
while os.path.isabs(relpath):
|
||||||
|
relpath = relpath[1:]
|
||||||
|
return relpath
|
||||||
|
|
||||||
|
def setUrls(self, urls):
|
||||||
|
self.__urls = urls
|
||||||
|
|
||||||
|
def getUrls(self):
|
||||||
|
return self.__urls
|
||||||
|
|
||||||
|
urls = property(getUrls, setUrls, None, "Urls property")
|
||||||
|
|
||||||
|
def forcefetch(self, url, urldata, d):
|
||||||
|
"""
|
||||||
|
Force a fetch, even if localpath exists?
|
||||||
|
"""
|
||||||
|
return False
|
||||||
|
|
||||||
|
def supports_srcrev(self):
|
||||||
|
"""
|
||||||
|
The fetcher supports auto source revisions (SRCREV)
|
||||||
|
"""
|
||||||
|
return False
|
||||||
|
|
||||||
|
def go(self, url, urldata, d):
|
||||||
|
"""
|
||||||
|
Fetch urls
|
||||||
|
Assumes localpath was called first
|
||||||
|
"""
|
||||||
|
raise NoMethodError("Missing implementation for url")
|
||||||
|
|
||||||
|
def try_premirror(self, url, urldata, d):
|
||||||
|
"""
|
||||||
|
Should premirrors be used?
|
||||||
|
"""
|
||||||
|
if urldata.method.forcefetch(url, urldata, d):
|
||||||
|
return True
|
||||||
|
elif os.path.exists(urldata.md5) and os.path.exists(urldata.localfile):
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
return True
|
||||||
|
|
||||||
|
def checkstatus(self, url, urldata, d):
|
||||||
|
"""
|
||||||
|
Check the status of a URL
|
||||||
|
Assumes localpath was called first
|
||||||
|
"""
|
||||||
|
logger.info("URL %s could not be checked for status since no method exists.", url)
|
||||||
|
return True
|
||||||
|
|
||||||
|
def getSRCDate(urldata, d):
|
||||||
|
"""
|
||||||
|
Return the SRC Date for the component
|
||||||
|
|
||||||
|
d the bb.data module
|
||||||
|
"""
|
||||||
|
if "srcdate" in urldata.parm:
|
||||||
|
return urldata.parm['srcdate']
|
||||||
|
|
||||||
|
pn = data.getVar("PN", d, 1)
|
||||||
|
|
||||||
|
if pn:
|
||||||
|
return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
|
||||||
|
|
||||||
|
return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
|
||||||
|
getSRCDate = staticmethod(getSRCDate)
|
||||||
|
|
||||||
|
def srcrev_internal_helper(ud, d):
|
||||||
|
"""
|
||||||
|
Return:
|
||||||
|
a) a source revision if specified
|
||||||
|
b) True if auto srcrev is in action
|
||||||
|
c) False otherwise
|
||||||
|
"""
|
||||||
|
|
||||||
|
if 'rev' in ud.parm:
|
||||||
|
return ud.parm['rev']
|
||||||
|
|
||||||
|
if 'tag' in ud.parm:
|
||||||
|
return ud.parm['tag']
|
||||||
|
|
||||||
|
rev = None
|
||||||
|
if 'name' in ud.parm:
|
||||||
|
pn = data.getVar("PN", d, 1)
|
||||||
|
rev = data.getVar("SRCREV_%s_pn-%s" % (ud.parm['name'], pn), d, 1)
|
||||||
|
if not rev:
|
||||||
|
rev = data.getVar("SRCREV_pn-%s_%s" % (pn, ud.parm['name']), d, 1)
|
||||||
|
if not rev:
|
||||||
|
rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1)
|
||||||
|
if not rev:
|
||||||
|
rev = data.getVar("SRCREV", d, 1)
|
||||||
|
if rev == "INVALID":
|
||||||
|
raise InvalidSRCREV("Please set SRCREV to a valid value")
|
||||||
|
if not rev:
|
||||||
|
return False
|
||||||
|
if rev == "SRCREVINACTION":
|
||||||
|
return True
|
||||||
|
return rev
|
||||||
|
|
||||||
|
srcrev_internal_helper = staticmethod(srcrev_internal_helper)
|
||||||
|
|
||||||
|
def localcount_internal_helper(ud, d):
|
||||||
|
"""
|
||||||
|
Return:
|
||||||
|
a) a locked localcount if specified
|
||||||
|
b) None otherwise
|
||||||
|
"""
|
||||||
|
|
||||||
|
localcount = None
|
||||||
|
if 'name' in ud.parm:
|
||||||
|
pn = data.getVar("PN", d, 1)
|
||||||
|
localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1)
|
||||||
|
if not localcount:
|
||||||
|
localcount = data.getVar("LOCALCOUNT", d, 1)
|
||||||
|
return localcount
|
||||||
|
|
||||||
|
localcount_internal_helper = staticmethod(localcount_internal_helper)
|
||||||
|
|
||||||
|
def verify_md5sum(ud, got_sum):
|
||||||
|
"""
|
||||||
|
Verify the md5sum we wanted with the one we got
|
||||||
|
"""
|
||||||
|
wanted_sum = ud.parm.get('md5sum')
|
||||||
|
if not wanted_sum:
|
||||||
|
return True
|
||||||
|
|
||||||
|
return wanted_sum == got_sum
|
||||||
|
verify_md5sum = staticmethod(verify_md5sum)
|
||||||
|
|
||||||
|
def write_md5sum(url, ud, d):
|
||||||
|
md5data = bb.utils.md5_file(ud.localpath)
|
||||||
|
# verify the md5sum
|
||||||
|
if not Fetch.verify_md5sum(ud, md5data):
|
||||||
|
raise MD5SumError(url)
|
||||||
|
|
||||||
|
md5out = file(ud.md5, 'w')
|
||||||
|
md5out.write(md5data)
|
||||||
|
md5out.close()
|
||||||
|
write_md5sum = staticmethod(write_md5sum)
|
||||||
|
|
||||||
|
def latest_revision(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
Look in the cache for the latest revision, if not present ask the SCM.
|
||||||
|
"""
|
||||||
|
if not hasattr(self, "_latest_revision"):
|
||||||
|
raise ParameterError
|
||||||
|
|
||||||
|
revs = persist_data.persist('BB_URI_HEADREVS', d)
|
||||||
|
key = self.generate_revision_key(url, ud, d)
|
||||||
|
try:
|
||||||
|
return revs[key]
|
||||||
|
except KeyError:
|
||||||
|
revs[key] = rev = self._latest_revision(url, ud, d)
|
||||||
|
return rev
|
||||||
|
|
||||||
|
def sortable_revision(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
|
||||||
|
"""
|
||||||
|
if hasattr(self, "_sortable_revision"):
|
||||||
|
return self._sortable_revision(url, ud, d)
|
||||||
|
|
||||||
|
localcounts = persist_data.persist('BB_URI_LOCALCOUNT', d)
|
||||||
|
key = self.generate_revision_key(url, ud, d)
|
||||||
|
|
||||||
|
latest_rev = self._build_revision(url, ud, d)
|
||||||
|
last_rev = localcounts.get(key + '_rev')
|
||||||
|
uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
|
||||||
|
count = None
|
||||||
|
if uselocalcount:
|
||||||
|
count = Fetch.localcount_internal_helper(ud, d)
|
||||||
|
if count is None:
|
||||||
|
count = localcounts.get(key + '_count')
|
||||||
|
|
||||||
|
if last_rev == latest_rev:
|
||||||
|
return str(count + "+" + latest_rev)
|
||||||
|
|
||||||
|
buildindex_provided = hasattr(self, "_sortable_buildindex")
|
||||||
|
if buildindex_provided:
|
||||||
|
count = self._sortable_buildindex(url, ud, d, latest_rev)
|
||||||
|
|
||||||
|
if count is None:
|
||||||
|
count = "0"
|
||||||
|
elif uselocalcount or buildindex_provided:
|
||||||
|
count = str(count)
|
||||||
|
else:
|
||||||
|
count = str(int(count) + 1)
|
||||||
|
|
||||||
|
localcounts[key + '_rev'] = latest_rev
|
||||||
|
localcounts[key + '_count'] = count
|
||||||
|
|
||||||
|
return str(count + "+" + latest_rev)
|
||||||
|
|
||||||
|
def generate_revision_key(self, url, ud, d):
|
||||||
|
key = self._revision_key(url, ud, d)
|
||||||
|
return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
|
||||||
|
|
||||||
|
from . import cvs
|
||||||
|
from . import git
|
||||||
|
from . import local
|
||||||
|
from . import svn
|
||||||
|
from . import wget
|
||||||
|
from . import svk
|
||||||
|
from . import ssh
|
||||||
|
from . import perforce
|
||||||
|
from . import bzr
|
||||||
|
from . import hg
|
||||||
|
from . import osc
|
||||||
|
from . import repo
|
||||||
|
|
||||||
|
methods.append(local.Local())
|
||||||
|
methods.append(wget.Wget())
|
||||||
|
methods.append(svn.Svn())
|
||||||
|
methods.append(git.Git())
|
||||||
|
methods.append(cvs.Cvs())
|
||||||
|
methods.append(svk.Svk())
|
||||||
|
methods.append(ssh.SSH())
|
||||||
|
methods.append(perforce.Perforce())
|
||||||
|
methods.append(bzr.Bzr())
|
||||||
|
methods.append(hg.Hg())
|
||||||
|
methods.append(osc.Osc())
|
||||||
|
methods.append(repo.Repo())
|
|
@@ -0,0 +1,148 @@
|
||||||
|
"""
|
||||||
|
BitBake 'Fetch' implementation for bzr.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Copyright (C) 2007 Ross Burton
|
||||||
|
# Copyright (C) 2007 Richard Purdie
|
||||||
|
#
|
||||||
|
# Classes for obtaining upstream sources for the
|
||||||
|
# BitBake build tools.
|
||||||
|
# Copyright (C) 2003, 2004 Chris Larson
|
||||||
|
#
|
||||||
|
# This program is free software; you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License version 2 as
|
||||||
|
# published by the Free Software Foundation.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License along
|
||||||
|
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||||
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
import bb
|
||||||
|
from bb import data
|
||||||
|
from bb.fetch import Fetch, FetchError, runfetchcmd, logger
|
||||||
|
|
||||||
|
class Bzr(Fetch):
|
||||||
|
def supports(self, url, ud, d):
|
||||||
|
return ud.type in ['bzr']
|
||||||
|
|
||||||
|
def localpath (self, url, ud, d):
|
||||||
|
|
||||||
|
# Create paths to bzr checkouts
|
||||||
|
relpath = self._strip_leading_slashes(ud.path)
|
||||||
|
ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)
|
||||||
|
|
||||||
|
revision = Fetch.srcrev_internal_helper(ud, d)
|
||||||
|
if revision is True:
|
||||||
|
ud.revision = self.latest_revision(url, ud, d)
|
||||||
|
elif revision:
|
||||||
|
ud.revision = revision
|
||||||
|
|
||||||
|
if not ud.revision:
|
||||||
|
ud.revision = self.latest_revision(url, ud, d)
|
||||||
|
|
||||||
|
ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)
|
||||||
|
|
||||||
|
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
|
||||||
|
|
||||||
|
def _buildbzrcommand(self, ud, d, command):
|
||||||
|
"""
|
||||||
|
Build up a bzr commandline based on ud
|
||||||
|
command is "fetch", "update", "revno"
|
||||||
|
"""
|
||||||
|
|
||||||
|
basecmd = data.expand('${FETCHCMD_bzr}', d)
|
||||||
|
|
||||||
|
proto = ud.parm.get('proto', 'http')
|
||||||
|
|
||||||
|
bzrroot = ud.host + ud.path
|
||||||
|
|
||||||
|
options = []
|
||||||
|
|
||||||
|
if command == "revno":
|
||||||
|
bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
|
||||||
|
else:
|
||||||
|
if ud.revision:
|
||||||
|
options.append("-r %s" % ud.revision)
|
||||||
|
|
||||||
|
if command == "fetch":
|
||||||
|
bzrcmd = "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
|
||||||
|
elif command == "update":
|
||||||
|
bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
|
||||||
|
else:
|
||||||
|
raise FetchError("Invalid bzr command %s" % command)
|
||||||
|
|
||||||
|
return bzrcmd
|
||||||
|
|
||||||
|
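For orientation, the command strings _buildbzrcommand() above produces, assuming ${FETCHCMD_bzr} expands to plain "bzr", ud.revision is "1234" and the repository lives at bzr.example.com/trunk over http (all invented values):

# command == "revno"  ->  bzr revno  http://bzr.example.com/trunk
# command == "fetch"  ->  bzr co -r 1234 http://bzr.example.com/trunk
# command == "update" ->  bzr pull -r 1234 --overwrite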
def go(self, loc, ud, d):
|
||||||
|
"""Fetch url"""
|
||||||
|
|
||||||
|
if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
|
||||||
|
bzrcmd = self._buildbzrcommand(ud, d, "update")
|
||||||
|
logger.debug(1, "BZR Update %s", loc)
|
||||||
|
os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
|
||||||
|
runfetchcmd(bzrcmd, d)
|
||||||
|
else:
|
||||||
|
bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
|
||||||
|
bzrcmd = self._buildbzrcommand(ud, d, "fetch")
|
||||||
|
logger.debug(1, "BZR Checkout %s", loc)
|
||||||
|
bb.utils.mkdirhier(ud.pkgdir)
|
||||||
|
os.chdir(ud.pkgdir)
|
||||||
|
logger.debug(1, "Running %s", bzrcmd)
|
||||||
|
runfetchcmd(bzrcmd, d)
|
||||||
|
|
||||||
|
os.chdir(ud.pkgdir)
|
||||||
|
|
||||||
|
scmdata = ud.parm.get("scmdata", "")
|
||||||
|
if scmdata == "keep":
|
||||||
|
tar_flags = ""
|
||||||
|
else:
|
||||||
|
tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"
|
||||||
|
|
||||||
|
# tar them up to a defined filename
|
||||||
|
try:
|
||||||
|
runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d)
|
||||||
|
except:
|
||||||
|
t, v, tb = sys.exc_info()
|
||||||
|
try:
|
||||||
|
os.unlink(ud.localpath)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
raise t, v, tb
|
||||||
|
|
||||||
|
def supports_srcrev(self):
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _revision_key(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
Return a unique key for the url
|
||||||
|
"""
|
||||||
|
return "bzr:" + ud.pkgdir
|
||||||
|
|
||||||
|
def _latest_revision(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
Return the latest upstream revision number
|
||||||
|
"""
|
||||||
|
logger.debug(2, "BZR fetcher hitting network for %s", url)
|
||||||
|
|
||||||
|
output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)
|
||||||
|
|
||||||
|
return output.strip()
|
||||||
|
|
||||||
|
def _sortable_revision(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
Return a sortable revision number which in our case is the revision number
|
||||||
|
"""
|
||||||
|
|
||||||
|
return self._build_revision(url, ud, d)
|
||||||
|
|
||||||
|
def _build_revision(self, url, ud, d):
|
||||||
|
return ud.revision
|
|
@@ -0,0 +1,172 @@
|
||||||
|
# ex:ts=4:sw=4:sts=4:et
|
||||||
|
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||||
|
"""
|
||||||
|
BitBake 'Fetch' implementations
|
||||||
|
|
||||||
|
Classes for obtaining upstream sources for the
|
||||||
|
BitBake build tools.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Copyright (C) 2003, 2004 Chris Larson
|
||||||
|
#
|
||||||
|
# This program is free software; you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License version 2 as
|
||||||
|
# published by the Free Software Foundation.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License along
|
||||||
|
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||||
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||||
|
#
|
||||||
|
#Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||||
|
#
|
||||||
|
|
||||||
|
import os
|
||||||
|
import logging
|
||||||
|
import bb
|
||||||
|
from bb import data
|
||||||
|
from bb.fetch import Fetch, FetchError, MissingParameterError, logger
|
||||||
|
|
||||||
|
class Cvs(Fetch):
|
||||||
|
"""
|
||||||
|
Class to fetch a module or modules from cvs repositories
|
||||||
|
"""
|
||||||
|
def supports(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
Check to see if a given url can be fetched with cvs.
|
||||||
|
"""
|
||||||
|
return ud.type in ['cvs']
|
||||||
|
|
||||||
|
def localpath(self, url, ud, d):
|
||||||
|
if not "module" in ud.parm:
|
||||||
|
raise MissingParameterError("cvs method needs a 'module' parameter")
|
||||||
|
ud.module = ud.parm["module"]
|
||||||
|
|
||||||
|
ud.tag = ud.parm.get('tag', "")
|
||||||
|
|
||||||
|
# Override the default date in certain cases
|
||||||
|
if 'date' in ud.parm:
|
||||||
|
ud.date = ud.parm['date']
|
||||||
|
elif ud.tag:
|
||||||
|
ud.date = ""
|
||||||
|
|
||||||
|
norecurse = ''
|
||||||
|
if 'norecurse' in ud.parm:
|
||||||
|
norecurse = '_norecurse'
|
||||||
|
|
||||||
|
fullpath = ''
|
||||||
|
if 'fullpath' in ud.parm:
|
||||||
|
fullpath = '_fullpath'
|
||||||
|
|
||||||
|
ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
|
||||||
|
|
||||||
|
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
|
||||||
|
|
||||||
|
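A sketch of the values Cvs.localpath() above derives from a cvs URL (host, module and tag are invented):

# SRC_URI = "cvs://anonymous@cvs.example.com/cvsroot;module=proj;tag=RELEASE_1"
# -> ud.module    = "proj"
# -> ud.tag       = "RELEASE_1"
# -> ud.date      = ""   (cleared because a tag was given)
# -> ud.localfile = "proj_cvs.example.com_RELEASE_1_.tar.gz"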
def forcefetch(self, url, ud, d):
|
||||||
|
if (ud.date == "now"):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def go(self, loc, ud, d):
|
||||||
|
|
||||||
|
method = ud.parm.get('method', 'pserver')
|
||||||
|
localdir = ud.parm.get('localdir', ud.module)
|
||||||
|
cvs_port = ud.parm.get('port', '')
|
||||||
|
|
||||||
|
cvs_rsh = None
|
||||||
|
if method == "ext":
|
||||||
|
if "rsh" in ud.parm:
|
||||||
|
cvs_rsh = ud.parm["rsh"]
|
||||||
|
|
||||||
|
if method == "dir":
|
||||||
|
cvsroot = ud.path
|
||||||
|
else:
|
||||||
|
cvsroot = ":" + method
|
||||||
|
cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
|
||||||
|
if cvsproxyhost:
|
||||||
|
cvsroot += ";proxy=" + cvsproxyhost
|
||||||
|
cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
|
||||||
|
if cvsproxyport:
|
||||||
|
cvsroot += ";proxyport=" + cvsproxyport
|
||||||
|
cvsroot += ":" + ud.user
|
||||||
|
if ud.pswd:
|
||||||
|
cvsroot += ":" + ud.pswd
|
||||||
|
cvsroot += "@" + ud.host + ":" + cvs_port + ud.path
|
||||||
|
|
||||||
|
options = []
|
||||||
|
if 'norecurse' in ud.parm:
|
||||||
|
options.append("-l")
|
||||||
|
if ud.date:
|
||||||
|
# treat YYYYMMDDHHMM specially for CVS
|
||||||
|
if len(ud.date) == 12:
|
||||||
|
options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
|
||||||
|
else:
|
||||||
|
options.append("-D \"%s UTC\"" % ud.date)
|
||||||
|
if ud.tag:
|
||||||
|
options.append("-r %s" % ud.tag)
|
||||||
|
|
||||||
|
localdata = data.createCopy(d)
|
||||||
|
data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
|
||||||
|
data.update_data(localdata)
|
||||||
|
|
||||||
|
data.setVar('CVSROOT', cvsroot, localdata)
|
||||||
|
data.setVar('CVSCOOPTS', " ".join(options), localdata)
|
||||||
|
data.setVar('CVSMODULE', ud.module, localdata)
|
||||||
|
cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
|
||||||
|
cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)
|
||||||
|
|
||||||
|
if cvs_rsh:
|
||||||
|
cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
|
||||||
|
cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
|
||||||
|
|
||||||
|
# create module directory
|
||||||
|
logger.debug(2, "Fetch: checking for module directory")
|
||||||
|
pkg = data.expand('${PN}', d)
|
||||||
|
pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
|
||||||
|
moddir = os.path.join(pkgdir, localdir)
|
||||||
|
if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
|
||||||
|
logger.info("Update " + loc)
|
||||||
|
# update sources there
|
||||||
|
os.chdir(moddir)
|
||||||
|
myret = os.system(cvsupdatecmd)
|
||||||
|
else:
|
||||||
|
logger.info("Fetch " + loc)
|
||||||
|
# check out sources there
|
||||||
|
bb.utils.mkdirhier(pkgdir)
|
||||||
|
os.chdir(pkgdir)
|
||||||
|
logger.debug(1, "Running %s", cvscmd)
|
||||||
|
myret = os.system(cvscmd)
|
||||||
|
|
||||||
|
if myret != 0 or not os.access(moddir, os.R_OK):
|
||||||
|
try:
|
||||||
|
os.rmdir(moddir)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
raise FetchError(ud.module)
|
||||||
|
|
||||||
|
scmdata = ud.parm.get("scmdata", "")
|
||||||
|
if scmdata == "keep":
|
||||||
|
tar_flags = ""
|
||||||
|
else:
|
||||||
|
tar_flags = "--exclude 'CVS'"
|
||||||
|
|
||||||
|
# tar them up to a defined filename
|
||||||
|
if 'fullpath' in ud.parm:
|
||||||
|
os.chdir(pkgdir)
|
||||||
|
myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir))
|
||||||
|
else:
|
||||||
|
os.chdir(moddir)
|
||||||
|
os.chdir('..')
|
||||||
|
myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir)))
|
||||||
|
|
||||||
|
if myret != 0:
|
||||||
|
try:
|
||||||
|
os.unlink(ud.localpath)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
raise FetchError(ud.module)
|
|
@@ -0,0 +1,339 @@
|
||||||
|
# ex:ts=4:sw=4:sts=4:et
|
||||||
|
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||||
|
"""
|
||||||
|
BitBake 'Fetch' git implementation
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
#Copyright (C) 2005 Richard Purdie
|
||||||
|
#
|
||||||
|
# This program is free software; you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License version 2 as
|
||||||
|
# published by the Free Software Foundation.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License along
|
||||||
|
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||||
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||||
|
|
||||||
|
import os
|
||||||
|
import bb
|
||||||
|
import bb.persist_data
|
||||||
|
from bb import data
|
||||||
|
from bb.fetch import Fetch
|
||||||
|
from bb.fetch import runfetchcmd
|
||||||
|
from bb.fetch import logger
|
||||||
|
|
||||||
|
class Git(Fetch):
|
||||||
|
"""Class to fetch a module or modules from git repositories"""
|
||||||
|
def init(self, d):
|
||||||
|
#
|
||||||
|
# Only enable _sortable revision if the key is set
|
||||||
|
#
|
||||||
|
if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True):
|
||||||
|
self._sortable_buildindex = self._sortable_buildindex_disabled
|
||||||
|
def supports(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
Check to see if a given url can be fetched with git.
|
||||||
|
"""
|
||||||
|
return ud.type in ['git']
|
||||||
|
|
||||||
|
def localpath(self, url, ud, d):
|
||||||
|
|
||||||
|
if 'protocol' in ud.parm:
|
||||||
|
ud.proto = ud.parm['protocol']
|
||||||
|
elif not ud.host:
|
||||||
|
ud.proto = 'file'
|
||||||
|
else:
|
||||||
|
ud.proto = "rsync"
|
||||||
|
|
||||||
|
ud.branch = ud.parm.get("branch", "master")
|
||||||
|
|
||||||
|
gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
|
||||||
|
ud.mirrortarball = 'git_%s.tar.gz' % (gitsrcname)
|
||||||
|
ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
|
||||||
|
|
||||||
|
tag = Fetch.srcrev_internal_helper(ud, d)
|
||||||
|
if tag is True:
|
||||||
|
ud.tag = self.latest_revision(url, ud, d)
|
||||||
|
elif tag:
|
||||||
|
ud.tag = tag
|
||||||
|
|
||||||
|
if not ud.tag or ud.tag == "master":
|
||||||
|
ud.tag = self.latest_revision(url, ud, d)
|
||||||
|
|
||||||
|
subdir = ud.parm.get("subpath", "")
|
||||||
|
if subdir != "":
|
||||||
|
if subdir.endswith("/"):
|
||||||
|
subdir = subdir[:-1]
|
||||||
|
subdirpath = os.path.join(ud.path, subdir)
|
||||||
|
else:
|
||||||
|
subdirpath = ud.path
|
||||||
|
|
||||||
|
if 'fullclone' in ud.parm:
|
||||||
|
ud.localfile = ud.mirrortarball
|
||||||
|
else:
|
||||||
|
ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, subdirpath.replace('/', '.'), ud.tag), d)
|
||||||
|
|
||||||
|
ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
|
||||||
|
|
||||||
|
if 'noclone' in ud.parm:
|
||||||
|
ud.localfile = None
|
||||||
|
return None
|
||||||
|
|
||||||
|
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
|
||||||
|
|
||||||
|
def forcefetch(self, url, ud, d):
|
||||||
|
if 'fullclone' in ud.parm:
|
||||||
|
return True
|
||||||
|
if 'noclone' in ud.parm:
|
||||||
|
return False
|
||||||
|
if os.path.exists(ud.localpath):
|
||||||
|
return False
|
||||||
|
if not self._contains_ref(ud.tag, d):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def try_premirror(self, u, ud, d):
|
||||||
|
if 'noclone' in ud.parm:
|
||||||
|
return False
|
||||||
|
if os.path.exists(ud.clonedir):
|
||||||
|
return False
|
||||||
|
if os.path.exists(ud.localpath):
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def go(self, loc, ud, d):
|
||||||
|
"""Fetch url"""
|
||||||
|
|
||||||
|
if ud.user:
|
||||||
|
username = ud.user + '@'
|
||||||
|
else:
|
||||||
|
username = ""
|
||||||
|
|
||||||
|
repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball)
|
||||||
|
|
||||||
|
|
||||||
|
coname = '%s' % (ud.tag)
|
||||||
|
codir = os.path.join(ud.clonedir, coname)
|
||||||
|
|
||||||
|
# If we have no existing clone and no mirror tarball, try and obtain one
|
||||||
|
if not os.path.exists(ud.clonedir) and not os.path.exists(repofile):
|
||||||
|
try:
|
||||||
|
Fetch.try_mirrors(ud.mirrortarball)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# If the checkout doesn't exist and the mirror tarball does, extract it
|
||||||
|
if not os.path.exists(ud.clonedir) and os.path.exists(repofile):
|
||||||
|
bb.utils.mkdirhier(ud.clonedir)
|
||||||
|
os.chdir(ud.clonedir)
|
||||||
|
runfetchcmd("tar -xzf %s" % (repofile), d)
|
||||||
|
|
||||||
|
# If the repo still doesn't exist, fallback to cloning it
|
||||||
|
if not os.path.exists(ud.clonedir):
|
||||||
|
runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d)
|
||||||
|
|
||||||
|
os.chdir(ud.clonedir)
|
||||||
|
# Update the checkout if needed
|
||||||
|
if not self._contains_ref(ud.tag, d) or 'fullclone' in ud.parm:
|
||||||
|
# Remove all but the .git directory
|
||||||
|
runfetchcmd("rm * -Rf", d)
|
||||||
|
if 'fullclone' in ud.parm:
|
||||||
|
runfetchcmd("%s fetch --all" % (ud.basecmd), d)
|
||||||
|
else:
|
||||||
|
runfetchcmd("%s fetch %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.branch), d)
|
||||||
|
runfetchcmd("%s fetch --tags %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
|
||||||
|
runfetchcmd("%s prune-packed" % ud.basecmd, d)
|
||||||
|
runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
|
||||||
|
|
||||||
|
# Generate a mirror tarball if needed
|
||||||
|
os.chdir(ud.clonedir)
|
||||||
|
mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
|
||||||
|
if mirror_tarballs != "0" or 'fullclone' in ud.parm:
|
||||||
|
logger.info("Creating tarball of git repository")
|
||||||
|
runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)
|
||||||
|
|
||||||
|
if 'fullclone' in ud.parm:
|
||||||
|
return
|
||||||
|
|
||||||
|
if os.path.exists(codir):
|
||||||
|
bb.utils.prunedir(codir)
|
||||||
|
|
||||||
|
subdir = ud.parm.get("subpath", "")
|
||||||
|
if subdir != "":
|
||||||
|
if subdir.endswith("/"):
|
||||||
|
subdirbase = os.path.basename(subdir[:-1])
|
||||||
|
else:
|
||||||
|
subdirbase = os.path.basename(subdir)
|
||||||
|
else:
|
||||||
|
subdirbase = ""
|
||||||
|
|
||||||
|
if subdir != "":
|
||||||
|
readpathspec = ":%s" % (subdir)
|
||||||
|
codir = os.path.join(codir, "git")
|
||||||
|
coprefix = os.path.join(codir, subdirbase, "")
|
||||||
|
else:
|
||||||
|
readpathspec = ""
|
||||||
|
coprefix = os.path.join(codir, "git", "")
|
||||||
|
|
||||||
|
scmdata = ud.parm.get("scmdata", "")
|
||||||
|
if scmdata == "keep":
|
||||||
|
runfetchcmd("%s clone -n %s %s" % (ud.basecmd, ud.clonedir, coprefix), d)
|
||||||
|
os.chdir(coprefix)
|
||||||
|
runfetchcmd("%s checkout -q -f %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
|
||||||
|
else:
|
||||||
|
bb.utils.mkdirhier(codir)
|
||||||
|
os.chdir(ud.clonedir)
|
||||||
|
runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
|
||||||
|
runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d)
|
||||||
|
|
||||||
|
os.chdir(codir)
|
||||||
|
logger.info("Creating tarball of git checkout")
|
||||||
|
runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)
|
||||||
|
|
||||||
|
os.chdir(ud.clonedir)
|
||||||
|
bb.utils.prunedir(codir)
|
||||||
|
|
||||||
|
def supports_srcrev(self):
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _contains_ref(self, tag, d):
|
||||||
|
basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
|
||||||
|
output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True)
|
||||||
|
return output.split()[0] != "0"
|
||||||
|
|
||||||
|
def _revision_key(self, url, ud, d, branch=False):
|
||||||
|
"""
|
||||||
|
Return a unique key for the url
|
||||||
|
"""
|
||||||
|
key = 'git:' + ud.host + ud.path.replace('/', '.')
|
||||||
|
if branch:
|
||||||
|
return key + ud.branch
|
||||||
|
else:
|
||||||
|
return key
|
||||||
|
|
||||||
|
def generate_revision_key(self, url, ud, d, branch=False):
|
||||||
|
key = self._revision_key(url, ud, d, branch)
|
||||||
|
return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
|
||||||
|
|
||||||
|
def _latest_revision(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
Compute the HEAD revision for the url
|
||||||
|
"""
|
||||||
|
if ud.user:
|
||||||
|
username = ud.user + '@'
|
||||||
|
else:
|
||||||
|
username = ""
|
||||||
|
|
||||||
|
basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
|
||||||
|
cmd = "%s ls-remote %s://%s%s%s %s" % (basecmd, ud.proto, username, ud.host, ud.path, ud.branch)
|
||||||
|
output = runfetchcmd(cmd, d, True)
|
||||||
|
if not output:
|
||||||
|
raise bb.fetch.FetchError("Fetch command %s gave empty output\n" % (cmd))
|
||||||
|
return output.split()[0]
|
||||||
|
|
||||||
|
def latest_revision(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
Look in the cache for the latest revision, if not present ask the SCM.
|
||||||
|
"""
|
||||||
|
revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
|
||||||
|
|
||||||
|
key = self.generate_revision_key(url, ud, d, branch=True)
|
||||||
|
|
||||||
|
try:
|
||||||
|
return revs[key]
|
||||||
|
except KeyError:
|
||||||
|
# Compatibility with old key format, no branch included
|
||||||
|
oldkey = self.generate_revision_key(url, ud, d, branch=False)
|
||||||
|
try:
|
||||||
|
rev = revs[oldkey]
|
||||||
|
except KeyError:
|
||||||
|
rev = self._latest_revision(url, ud, d)
|
||||||
|
else:
|
||||||
|
del revs[oldkey]
|
||||||
|
revs[key] = rev
|
||||||
|
return rev
|
||||||
|
|
||||||
|
def sortable_revision(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
|
||||||
|
"""
|
||||||
|
localcounts = bb.persist_data.persist('BB_URI_LOCALCOUNT', d)
|
||||||
|
key = self.generate_revision_key(url, ud, d, branch=True)
|
||||||
|
oldkey = self.generate_revision_key(url, ud, d, branch=False)
|
||||||
|
|
||||||
|
latest_rev = self._build_revision(url, ud, d)
|
||||||
|
last_rev = localcounts.get(key + '_rev')
|
||||||
|
if last_rev is None:
|
||||||
|
last_rev = localcounts.get(oldkey + '_rev')
|
||||||
|
if last_rev is not None:
|
||||||
|
del localcounts[oldkey + '_rev']
|
||||||
|
localcounts[key + '_rev'] = last_rev
|
||||||
|
|
||||||
|
uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
|
||||||
|
count = None
|
||||||
|
if uselocalcount:
|
||||||
|
count = Fetch.localcount_internal_helper(ud, d)
|
||||||
|
if count is None:
|
||||||
|
count = localcounts.get(key + '_count')
|
||||||
|
if count is None:
|
||||||
|
count = localcounts.get(oldkey + '_count')
|
||||||
|
if count is not None:
|
||||||
|
del localcounts[oldkey + '_count']
|
||||||
|
localcounts[key + '_count'] = count
|
||||||
|
|
||||||
|
if last_rev == latest_rev:
|
||||||
|
return str(count + "+" + latest_rev)
|
||||||
|
|
||||||
|
buildindex_provided = hasattr(self, "_sortable_buildindex")
|
||||||
|
if buildindex_provided:
|
||||||
|
count = self._sortable_buildindex(url, ud, d, latest_rev)
|
||||||
|
if count is None:
|
||||||
|
count = "0"
|
||||||
|
elif uselocalcount or buildindex_provided:
|
||||||
|
count = str(count)
|
||||||
|
else:
|
||||||
|
count = str(int(count) + 1)
|
||||||
|
|
||||||
|
localcounts[key + '_rev'] = latest_rev
|
||||||
|
localcounts[key + '_count'] = count
|
||||||
|
|
||||||
|
return str(count + "+" + latest_rev)
|
||||||
|
|
||||||
|
def _build_revision(self, url, ud, d):
|
||||||
|
return ud.tag
|
||||||
|
|
||||||
|
def _sortable_buildindex_disabled(self, url, ud, d, rev):
|
||||||
|
"""
|
||||||
|
Return a suitable buildindex for the revision specified. This is done by counting revisions
|
||||||
|
using "git rev-list" which may or may not work in different circumstances.
|
||||||
|
"""
|
||||||
|
|
||||||
|
cwd = os.getcwd()
|
||||||
|
|
||||||
|
# Check if we have the rev already
|
||||||
|
|
||||||
|
if not os.path.exists(ud.clonedir):
|
||||||
|
print("no repo")
|
||||||
|
self.go(None, ud, d)
|
||||||
|
if not os.path.exists(ud.clonedir):
|
||||||
|
logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value", url, ud.clonedir)
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
os.chdir(ud.clonedir)
|
||||||
|
if not self._contains_ref(rev, d):
|
||||||
|
self.go(None, ud, d)
|
||||||
|
|
||||||
|
output = runfetchcmd("%s rev-list %s -- 2> /dev/null | wc -l" % (ud.basecmd, rev), d, quiet=True)
|
||||||
|
os.chdir(cwd)
|
||||||
|
|
||||||
|
buildindex = "%s" % output.split()[0]
|
||||||
|
logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev)
|
||||||
|
return buildindex
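# Hedged sketch (host and path are assumed examples, not taken from this diff): how
# localpath() above derives its cache names from the URL components.
_example_host, _example_path = "git.example.com", "/project.git"
_example_srcname = '%s%s' % (_example_host, _example_path.replace('/', '.'))
# _example_srcname == 'git.example.com.project.git'
# mirror tarball   == 'git_git.example.com.project.git.tar.gz'
# clone directory  == ${GITDIR}/git.example.com.project.git
# ';fullclone=1' keeps the mirror tarball as the local file; ';noclone=1' skips it.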
|
|
@@ -0,0 +1,180 @@
|
||||||
|
# ex:ts=4:sw=4:sts=4:et
|
||||||
|
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||||
|
"""
|
||||||
|
BitBake 'Fetch' implementation for mercurial DRCS (hg).
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Copyright (C) 2003, 2004 Chris Larson
|
||||||
|
# Copyright (C) 2004 Marcin Juszkiewicz
|
||||||
|
# Copyright (C) 2007 Robert Schuster
|
||||||
|
#
|
||||||
|
# This program is free software; you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License version 2 as
|
||||||
|
# published by the Free Software Foundation.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License along
|
||||||
|
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||||
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||||
|
#
|
||||||
|
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
import bb
|
||||||
|
from bb import data
|
||||||
|
from bb.fetch import Fetch
|
||||||
|
from bb.fetch import FetchError
|
||||||
|
from bb.fetch import MissingParameterError
|
||||||
|
from bb.fetch import runfetchcmd
|
||||||
|
from bb.fetch import logger
|
||||||
|
|
||||||
|
class Hg(Fetch):
|
||||||
|
"""Class to fetch from mercurial repositories"""
|
||||||
|
def supports(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
Check to see if a given url can be fetched with mercurial.
|
||||||
|
"""
|
||||||
|
return ud.type in ['hg']
|
||||||
|
|
||||||
|
def forcefetch(self, url, ud, d):
|
||||||
|
revTag = ud.parm.get('rev', 'tip')
|
||||||
|
return revTag == "tip"
|
||||||
|
|
||||||
|
def localpath(self, url, ud, d):
|
||||||
|
if not "module" in ud.parm:
|
||||||
|
raise MissingParameterError("hg method needs a 'module' parameter")
|
||||||
|
|
||||||
|
ud.module = ud.parm["module"]
|
||||||
|
|
||||||
|
# Create paths to mercurial checkouts
|
||||||
|
relpath = self._strip_leading_slashes(ud.path)
|
||||||
|
ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
|
||||||
|
ud.moddir = os.path.join(ud.pkgdir, ud.module)
|
||||||
|
|
||||||
|
if 'rev' in ud.parm:
|
||||||
|
ud.revision = ud.parm['rev']
|
||||||
|
else:
|
||||||
|
tag = Fetch.srcrev_internal_helper(ud, d)
|
||||||
|
if tag is True:
|
||||||
|
ud.revision = self.latest_revision(url, ud, d)
|
||||||
|
elif tag:
|
||||||
|
ud.revision = tag
|
||||||
|
else:
|
||||||
|
ud.revision = self.latest_revision(url, ud, d)
|
||||||
|
|
||||||
|
ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
|
||||||
|
|
||||||
|
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
|
||||||
|
|
||||||
|
def _buildhgcommand(self, ud, d, command):
|
||||||
|
"""
|
||||||
|
Build up an hg commandline based on ud
|
||||||
|
command is "fetch", "update", "info"
|
||||||
|
"""
|
||||||
|
|
||||||
|
basecmd = data.expand('${FETCHCMD_hg}', d)
|
||||||
|
|
||||||
|
proto = ud.parm.get('proto', 'http')
|
||||||
|
|
||||||
|
host = ud.host
|
||||||
|
if proto == "file":
|
||||||
|
host = "/"
|
||||||
|
ud.host = "localhost"
|
||||||
|
|
||||||
|
if not ud.user:
|
||||||
|
hgroot = host + ud.path
|
||||||
|
else:
|
||||||
|
hgroot = ud.user + "@" + host + ud.path
|
||||||
|
|
||||||
|
if command is "info":
|
||||||
|
return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)
|
||||||
|
|
||||||
|
options = []
|
||||||
|
if ud.revision:
|
||||||
|
options.append("-r %s" % ud.revision)
|
||||||
|
|
||||||
|
if command is "fetch":
|
||||||
|
cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
|
||||||
|
elif command is "pull":
|
||||||
|
# do not pass options list; limiting pull to rev causes the local
|
||||||
|
# repo not to contain it and immediately following "update" command
|
||||||
|
# will crash
|
||||||
|
cmd = "%s pull" % (basecmd)
|
||||||
|
elif command is "update":
|
||||||
|
cmd = "%s update -C %s" % (basecmd, " ".join(options))
|
||||||
|
else:
|
||||||
|
raise FetchError("Invalid hg command %s" % command)
|
||||||
|
|
||||||
|
return cmd
|
||||||
|
|
||||||
|
def go(self, loc, ud, d):
|
||||||
|
"""Fetch url"""
|
||||||
|
|
||||||
|
logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
|
||||||
|
|
||||||
|
if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
|
||||||
|
updatecmd = self._buildhgcommand(ud, d, "pull")
|
||||||
|
logger.info("Update " + loc)
|
||||||
|
# update sources there
|
||||||
|
os.chdir(ud.moddir)
|
||||||
|
logger.debug(1, "Running %s", updatecmd)
|
||||||
|
runfetchcmd(updatecmd, d)
|
||||||
|
|
||||||
|
else:
|
||||||
|
fetchcmd = self._buildhgcommand(ud, d, "fetch")
|
||||||
|
logger.info("Fetch " + loc)
|
||||||
|
# check out sources there
|
||||||
|
bb.utils.mkdirhier(ud.pkgdir)
|
||||||
|
os.chdir(ud.pkgdir)
|
||||||
|
logger.debug(1, "Running %s", fetchcmd)
|
||||||
|
runfetchcmd(fetchcmd, d)
|
||||||
|
|
||||||
|
# Even when we clone (fetch), we still need to update as hg's clone
|
||||||
|
# won't check out the specified revision if it's on a branch
|
||||||
|
updatecmd = self._buildhgcommand(ud, d, "update")
|
||||||
|
os.chdir(ud.moddir)
|
||||||
|
logger.debug(1, "Running %s", updatecmd)
|
||||||
|
runfetchcmd(updatecmd, d)
|
||||||
|
|
||||||
|
scmdata = ud.parm.get("scmdata", "")
|
||||||
|
if scmdata == "keep":
|
||||||
|
tar_flags = ""
|
||||||
|
else:
|
||||||
|
tar_flags = "--exclude '.hg' --exclude '.hgrags'"
|
||||||
|
|
||||||
|
os.chdir(ud.pkgdir)
|
||||||
|
try:
|
||||||
|
runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d)
|
||||||
|
except:
|
||||||
|
t, v, tb = sys.exc_info()
|
||||||
|
try:
|
||||||
|
os.unlink(ud.localpath)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
raise t, v, tb
|
||||||
|
|
||||||
|
def supports_srcrev(self):
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _latest_revision(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
Compute tip revision for the url
|
||||||
|
"""
|
||||||
|
output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
|
||||||
|
return output.strip()
|
||||||
|
|
||||||
|
def _build_revision(self, url, ud, d):
|
||||||
|
return ud.revision
|
||||||
|
|
||||||
|
def _revision_key(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
Return a unique key for the url
|
||||||
|
"""
|
||||||
|
return "hg:" + ud.moddir
|
|
@@ -0,0 +1,73 @@
|
||||||
|
# ex:ts=4:sw=4:sts=4:et
|
||||||
|
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||||
|
"""
|
||||||
|
BitBake 'Fetch' implementations
|
||||||
|
|
||||||
|
Classes for obtaining upstream sources for the
|
||||||
|
BitBake build tools.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Copyright (C) 2003, 2004 Chris Larson
|
||||||
|
#
|
||||||
|
# This program is free software; you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License version 2 as
|
||||||
|
# published by the Free Software Foundation.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License along
|
||||||
|
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||||
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||||
|
#
|
||||||
|
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||||
|
|
||||||
|
import os
|
||||||
|
import bb
|
||||||
|
import bb.utils
|
||||||
|
from bb import data
|
||||||
|
from bb.fetch import Fetch
from bb.fetch import logger
|
||||||
|
|
||||||
|
class Local(Fetch):
|
||||||
|
def supports(self, url, urldata, d):
|
||||||
|
"""
|
||||||
|
Check to see if a given url represents a local fetch.
|
||||||
|
"""
|
||||||
|
return urldata.type in ['file']
|
||||||
|
|
||||||
|
def localpath(self, url, urldata, d):
|
||||||
|
"""
|
||||||
|
Return the local filename of a given url assuming a successful fetch.
|
||||||
|
"""
|
||||||
|
path = url.split("://")[1]
|
||||||
|
path = path.split(";")[0]
|
||||||
|
newpath = path
|
||||||
|
if path[0] != "/":
|
||||||
|
filespath = data.getVar('FILESPATH', d, 1)
|
||||||
|
if filespath:
|
||||||
|
newpath = bb.utils.which(filespath, path)
|
||||||
|
if not newpath:
|
||||||
|
filesdir = data.getVar('FILESDIR', d, 1)
|
||||||
|
if filesdir:
|
||||||
|
newpath = os.path.join(filesdir, path)
|
||||||
|
# We don't set localfile as for this fetcher the file is already local!
|
||||||
|
return newpath
|
||||||
|
|
||||||
|
def go(self, url, urldata, d):
|
||||||
|
"""Fetch urls (no-op for Local method)"""
|
||||||
|
# no need to fetch local files, we'll deal with them in place.
|
||||||
|
return 1
|
||||||
|
|
||||||
|
def checkstatus(self, url, urldata, d):
|
||||||
|
"""
|
||||||
|
Check the status of the url
|
||||||
|
"""
|
||||||
|
if urldata.localpath.find("*") != -1:
|
||||||
|
logger.info("URL %s looks like a glob and was therefore not checked.", url)
|
||||||
|
return True
|
||||||
|
if os.path.exists(urldata.localpath):
|
||||||
|
return True
|
||||||
|
return False
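# Hedged sketch (file names and directories are assumed examples): the lookup order
# used by localpath() above for a relative file:// URL.
#   file://defconfig -> bb.utils.which(FILESPATH, "defconfig"), i.e. the first
#                       FILESPATH entry that contains the file
#   if nothing matches and FILESDIR is set -> os.path.join(FILESDIR, "defconfig")
#   file:///etc/motd -> returned as-is, since the path is already absolute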
|
|
@@ -0,0 +1,143 @@
|
||||||
|
# ex:ts=4:sw=4:sts=4:et
|
||||||
|
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||||
|
"""
|
||||||
|
Bitbake "Fetch" implementation for osc (Opensuse build service client).
|
||||||
|
Based on the svn "Fetch" implementation.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
import bb
|
||||||
|
from bb import data
|
||||||
|
from bb import utils
|
||||||
|
from bb.fetch import Fetch
|
||||||
|
from bb.fetch import FetchError
|
||||||
|
from bb.fetch import MissingParameterError
|
||||||
|
from bb.fetch import runfetchcmd
from bb.fetch import logger
|
||||||
|
|
||||||
|
class Osc(Fetch):
|
||||||
|
"""Class to fetch a module or modules from Opensuse build server
|
||||||
|
repositories."""
|
||||||
|
|
||||||
|
def supports(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
Check to see if a given url can be fetched with osc.
|
||||||
|
"""
|
||||||
|
return ud.type in ['osc']
|
||||||
|
|
||||||
|
def localpath(self, url, ud, d):
|
||||||
|
if not "module" in ud.parm:
|
||||||
|
raise MissingParameterError("osc method needs a 'module' parameter.")
|
||||||
|
|
||||||
|
ud.module = ud.parm["module"]
|
||||||
|
|
||||||
|
# Create paths to osc checkouts
|
||||||
|
relpath = self._strip_leading_slashes(ud.path)
|
||||||
|
ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
|
||||||
|
ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
|
||||||
|
|
||||||
|
if 'rev' in ud.parm:
|
||||||
|
ud.revision = ud.parm['rev']
|
||||||
|
else:
|
||||||
|
pv = data.getVar("PV", d, 0)
|
||||||
|
rev = Fetch.srcrev_internal_helper(ud, d)
|
||||||
|
if rev and rev != True:
|
||||||
|
ud.revision = rev
|
||||||
|
else:
|
||||||
|
ud.revision = ""
|
||||||
|
|
||||||
|
ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)
|
||||||
|
|
||||||
|
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
|
||||||
|
|
||||||
|
def _buildosccommand(self, ud, d, command):
|
||||||
|
"""
|
||||||
|
Build up an osc commandline based on ud
|
||||||
|
command is "fetch", "update", "info"
|
||||||
|
"""
|
||||||
|
|
||||||
|
basecmd = data.expand('${FETCHCMD_osc}', d)
|
||||||
|
|
||||||
|
proto = ud.parm.get('proto', 'ocs')
|
||||||
|
|
||||||
|
options = []
|
||||||
|
|
||||||
|
config = "-c %s" % self.generate_config(ud, d)
|
||||||
|
|
||||||
|
if ud.revision:
|
||||||
|
options.append("-r %s" % ud.revision)
|
||||||
|
|
||||||
|
coroot = self._strip_leading_slashes(ud.path)
|
||||||
|
|
||||||
|
if command is "fetch":
|
||||||
|
osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
|
||||||
|
elif command is "update":
|
||||||
|
osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
|
||||||
|
else:
|
||||||
|
raise FetchError("Invalid osc command %s" % command)
|
||||||
|
|
||||||
|
return osccmd
|
||||||
|
|
||||||
|
def go(self, loc, ud, d):
|
||||||
|
"""
|
||||||
|
Fetch url
|
||||||
|
"""
|
||||||
|
|
||||||
|
logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
|
||||||
|
|
||||||
|
if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
|
||||||
|
oscupdatecmd = self._buildosccommand(ud, d, "update")
|
||||||
|
logger.info("Update "+ loc)
|
||||||
|
# update sources there
|
||||||
|
os.chdir(ud.moddir)
|
||||||
|
logger.debug(1, "Running %s", oscupdatecmd)
|
||||||
|
runfetchcmd(oscupdatecmd, d)
|
||||||
|
else:
|
||||||
|
oscfetchcmd = self._buildosccommand(ud, d, "fetch")
|
||||||
|
logger.info("Fetch " + loc)
|
||||||
|
# check out sources there
|
||||||
|
bb.utils.mkdirhier(ud.pkgdir)
|
||||||
|
os.chdir(ud.pkgdir)
|
||||||
|
logger.debug(1, "Running %s", oscfetchcmd)
|
||||||
|
runfetchcmd(oscfetchcmd, d)
|
||||||
|
|
||||||
|
os.chdir(os.path.join(ud.pkgdir + ud.path))
|
||||||
|
# tar them up to a defined filename
|
||||||
|
try:
|
||||||
|
runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d)
|
||||||
|
except:
|
||||||
|
t, v, tb = sys.exc_info()
|
||||||
|
try:
|
||||||
|
os.unlink(ud.localpath)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
raise t, v, tb
|
||||||
|
|
||||||
|
def supports_srcrev(self):
|
||||||
|
return False
|
||||||
|
|
||||||
|
def generate_config(self, ud, d):
|
||||||
|
"""
|
||||||
|
Generate a .oscrc to be used for this run.
|
||||||
|
"""
|
||||||
|
|
||||||
|
config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc")
|
||||||
|
bb.utils.remove(config_path)
|
||||||
|
|
||||||
|
f = open(config_path, 'w')
|
||||||
|
f.write("[general]\n")
|
||||||
|
f.write("apisrv = %s\n" % ud.host)
|
||||||
|
f.write("scheme = http\n")
|
||||||
|
f.write("su-wrapper = su -c\n")
|
||||||
|
f.write("build-root = %s\n" % data.expand('${WORKDIR}', d))
|
||||||
|
f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n")
|
||||||
|
f.write("extra-pkgs = gzip\n")
|
||||||
|
f.write("\n")
|
||||||
|
f.write("[%s]\n" % ud.host)
|
||||||
|
f.write("user = %s\n" % ud.parm["user"])
|
||||||
|
f.write("pass = %s\n" % ud.parm["pswd"])
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
return config_path
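# Hedged sketch (host and credentials are assumed examples): the shape of the
# oscrc file that generate_config() above writes.
#   [general]
#   apisrv = api.example.org
#   scheme = http
#   su-wrapper = su -c
#   build-root = ${WORKDIR}
#   ...
#   [api.example.org]
#   user = builduser
#   pass = secret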
|
|
@@ -0,0 +1,206 @@
|
||||||
|
# ex:ts=4:sw=4:sts=4:et
|
||||||
|
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||||
|
"""
|
||||||
|
BitBake 'Fetch' implementations
|
||||||
|
|
||||||
|
Classes for obtaining upstream sources for the
|
||||||
|
BitBake build tools.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Copyright (C) 2003, 2004 Chris Larson
|
||||||
|
#
|
||||||
|
# This program is free software; you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License version 2 as
|
||||||
|
# published by the Free Software Foundation.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License along
|
||||||
|
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||||
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||||
|
#
|
||||||
|
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||||
|
|
||||||
|
from future_builtins import zip
|
||||||
|
import os
|
||||||
|
import logging
|
||||||
|
import bb
|
||||||
|
from bb import data
|
||||||
|
from bb.fetch import Fetch
|
||||||
|
from bb.fetch import FetchError
|
||||||
|
from bb.fetch import logger
|
||||||
|
|
||||||
|
class Perforce(Fetch):
|
||||||
|
def supports(self, url, ud, d):
|
||||||
|
return ud.type in ['p4']
|
||||||
|
|
||||||
|
def doparse(url, d):
|
||||||
|
parm = {}
|
||||||
|
path = url.split("://")[1]
|
||||||
|
delim = path.find("@");
|
||||||
|
if delim != -1:
|
||||||
|
(user, pswd, host, port) = path.split('@')[0].split(":")
|
||||||
|
path = path.split('@')[1]
|
||||||
|
else:
|
||||||
|
(host, port) = data.getVar('P4PORT', d).split(':')
|
||||||
|
user = ""
|
||||||
|
pswd = ""
|
||||||
|
|
||||||
|
if path.find(";") != -1:
|
||||||
|
keys=[]
|
||||||
|
values=[]
|
||||||
|
plist = path.split(';')
|
||||||
|
for item in plist:
|
||||||
|
if item.count('='):
|
||||||
|
(key, value) = item.split('=')
|
||||||
|
keys.append(key)
|
||||||
|
values.append(value)
|
||||||
|
|
||||||
|
parm = dict(zip(keys, values))
|
||||||
|
path = "//" + path.split(';')[0]
|
||||||
|
host += ":%s" % (port)
|
||||||
|
parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
|
||||||
|
|
||||||
|
return host, path, user, pswd, parm
|
||||||
|
doparse = staticmethod(doparse)
|
||||||
|
|
||||||
|
def getcset(d, depot, host, user, pswd, parm):
|
||||||
|
p4opt = ""
|
||||||
|
if "cset" in parm:
|
||||||
|
return parm["cset"];
|
||||||
|
if user:
|
||||||
|
p4opt += " -u %s" % (user)
|
||||||
|
if pswd:
|
||||||
|
p4opt += " -P %s" % (pswd)
|
||||||
|
if host:
|
||||||
|
p4opt += " -p %s" % (host)
|
||||||
|
|
||||||
|
p4date = data.getVar("P4DATE", d, 1)
|
||||||
|
if "revision" in parm:
|
||||||
|
depot += "#%s" % (parm["revision"])
|
||||||
|
elif "label" in parm:
|
||||||
|
depot += "@%s" % (parm["label"])
|
||||||
|
elif p4date:
|
||||||
|
depot += "@%s" % (p4date)
|
||||||
|
|
||||||
|
p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
|
||||||
|
logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
|
||||||
|
p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
|
||||||
|
cset = p4file.readline().strip()
|
||||||
|
logger.debug(1, "READ %s", cset)
|
||||||
|
if not cset:
|
||||||
|
return -1
|
||||||
|
|
||||||
|
return cset.split(' ')[1]
|
||||||
|
getcset = staticmethod(getcset)
|
||||||
|
|
||||||
|
def localpath(self, url, ud, d):
|
||||||
|
|
||||||
|
(host, path, user, pswd, parm) = Perforce.doparse(url, d)
|
||||||
|
|
||||||
|
# If a label is specified, we use that as our filename
|
||||||
|
|
||||||
|
if "label" in parm:
|
||||||
|
ud.localfile = "%s.tar.gz" % (parm["label"])
|
||||||
|
return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
|
||||||
|
|
||||||
|
base = path
|
||||||
|
which = path.find('/...')
|
||||||
|
if which != -1:
|
||||||
|
base = path[:which]
|
||||||
|
|
||||||
|
base = self._strip_leading_slashes(base)
|
||||||
|
|
||||||
|
cset = Perforce.getcset(d, path, host, user, pswd, parm)
|
||||||
|
|
||||||
|
ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)
|
||||||
|
|
||||||
|
return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
|
||||||
|
|
||||||
|
def go(self, loc, ud, d):
|
||||||
|
"""
|
||||||
|
Fetch urls
|
||||||
|
"""
|
||||||
|
|
||||||
|
(host, depot, user, pswd, parm) = Perforce.doparse(loc, d)
|
||||||
|
|
||||||
|
if depot.find('/...') != -1:
|
||||||
|
path = depot[:depot.find('/...')]
|
||||||
|
else:
|
||||||
|
path = depot
|
||||||
|
|
||||||
|
module = parm.get('module', os.path.basename(path))
|
||||||
|
|
||||||
|
localdata = data.createCopy(d)
|
||||||
|
data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
|
||||||
|
data.update_data(localdata)
|
||||||
|
|
||||||
|
# Get the p4 command
|
||||||
|
p4opt = ""
|
||||||
|
if user:
|
||||||
|
p4opt += " -u %s" % (user)
|
||||||
|
|
||||||
|
if pswd:
|
||||||
|
p4opt += " -P %s" % (pswd)
|
||||||
|
|
||||||
|
if host:
|
||||||
|
p4opt += " -p %s" % (host)
|
||||||
|
|
||||||
|
p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)
|
||||||
|
|
||||||
|
# create temp directory
|
||||||
|
logger.debug(2, "Fetch: creating temporary directory")
|
||||||
|
bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
|
||||||
|
data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
|
||||||
|
tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
|
||||||
|
tmpfile = tmppipe.readline().strip()
|
||||||
|
if not tmpfile:
|
||||||
|
logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
|
||||||
|
raise FetchError(module)
|
||||||
|
|
||||||
|
if "label" in parm:
|
||||||
|
depot = "%s@%s" % (depot, parm["label"])
|
||||||
|
else:
|
||||||
|
cset = Perforce.getcset(d, depot, host, user, pswd, parm)
|
||||||
|
depot = "%s@%s" % (depot, cset)
|
||||||
|
|
||||||
|
os.chdir(tmpfile)
|
||||||
|
logger.info("Fetch " + loc)
|
||||||
|
logger.info("%s%s files %s", p4cmd, p4opt, depot)
|
||||||
|
p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))
|
||||||
|
|
||||||
|
if not p4file:
|
||||||
|
logger.error("Fetch: unable to get the P4 files from %s", depot)
|
||||||
|
raise FetchError(module)
|
||||||
|
|
||||||
|
count = 0
|
||||||
|
|
||||||
|
for file in p4file:
|
||||||
|
list = file.split()
|
||||||
|
|
||||||
|
if list[2] == "delete":
|
||||||
|
continue
|
||||||
|
|
||||||
|
dest = list[0][len(path)+1:]
|
||||||
|
where = dest.find("#")
|
||||||
|
|
||||||
|
os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]))
|
||||||
|
count = count + 1
|
||||||
|
|
||||||
|
if count == 0:
|
||||||
|
logger.error("Fetch: No files gathered from the P4 fetch")
|
||||||
|
raise FetchError(module)
|
||||||
|
|
||||||
|
myret = os.system("tar -czf %s %s" % (ud.localpath, module))
|
||||||
|
if myret != 0:
|
||||||
|
try:
|
||||||
|
os.unlink(ud.localpath)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
raise FetchError(module)
|
||||||
|
# cleanup
|
||||||
|
bb.utils.prunedir(tmpfile)
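# Hedged sketch (the change number and depot path are assumed examples): getcset()
# above reads one line of "p4 changes -m 1" output and keeps the second field.
_example_line = "Change 104823 on 2010/11/01 by builder@host 'update recipe'"
_example_cset = _example_line.strip().split(' ')[1]
# _example_cset == '104823'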
|
|
@@ -0,0 +1,98 @@
|
||||||
|
# ex:ts=4:sw=4:sts=4:et
|
||||||
|
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||||
|
"""
|
||||||
|
BitBake "Fetch" repo (git) implementation
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Copyright (C) 2009 Tom Rini <trini@embeddedalley.com>
|
||||||
|
#
|
||||||
|
# Based on git.py which is:
|
||||||
|
#Copyright (C) 2005 Richard Purdie
|
||||||
|
#
|
||||||
|
# This program is free software; you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License version 2 as
|
||||||
|
# published by the Free Software Foundation.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License along
|
||||||
|
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||||
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||||
|
|
||||||
|
import os
|
||||||
|
import bb
|
||||||
|
from bb import data
|
||||||
|
from bb.fetch import Fetch
|
||||||
|
from bb.fetch import runfetchcmd
from bb.fetch import logger
|
||||||
|
|
||||||
|
class Repo(Fetch):
|
||||||
|
"""Class to fetch a module or modules from repo (git) repositories"""
|
||||||
|
def supports(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
Check to see if a given url can be fetched with repo.
|
||||||
|
"""
|
||||||
|
return ud.type in ["repo"]
|
||||||
|
|
||||||
|
def localpath(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
We don"t care about the git rev of the manifests repository, but
|
||||||
|
we do care about the manifest to use. The default is "default".
|
||||||
|
We also care about the branch or tag to be used. The default is
|
||||||
|
"master".
|
||||||
|
"""
|
||||||
|
|
||||||
|
ud.proto = ud.parm.get('protocol', 'git')
|
||||||
|
ud.branch = ud.parm.get('branch', 'master')
|
||||||
|
ud.manifest = ud.parm.get('manifest', 'default.xml')
|
||||||
|
if not ud.manifest.endswith('.xml'):
|
||||||
|
ud.manifest += '.xml'
|
||||||
|
|
||||||
|
ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)
|
||||||
|
|
||||||
|
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
|
||||||
|
|
||||||
|
def go(self, loc, ud, d):
|
||||||
|
"""Fetch url"""
|
||||||
|
|
||||||
|
if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
|
||||||
|
logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
|
||||||
|
return
|
||||||
|
|
||||||
|
gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
|
||||||
|
repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
|
||||||
|
codir = os.path.join(repodir, gitsrcname, ud.manifest)
|
||||||
|
|
||||||
|
if ud.user:
|
||||||
|
username = ud.user + "@"
|
||||||
|
else:
|
||||||
|
username = ""
|
||||||
|
|
||||||
|
bb.utils.mkdirhier(os.path.join(codir, "repo"))
|
||||||
|
os.chdir(os.path.join(codir, "repo"))
|
||||||
|
if not os.path.exists(os.path.join(codir, "repo", ".repo")):
|
||||||
|
runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)
|
||||||
|
|
||||||
|
runfetchcmd("repo sync", d)
|
||||||
|
os.chdir(codir)
|
||||||
|
|
||||||
|
scmdata = ud.parm.get("scmdata", "")
|
||||||
|
if scmdata == "keep":
|
||||||
|
tar_flags = ""
|
||||||
|
else:
|
||||||
|
tar_flags = "--exclude '.repo' --exclude '.git'"
|
||||||
|
|
||||||
|
# Create a cache
|
||||||
|
runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)
|
||||||
|
|
||||||
|
def supports_srcrev(self):
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _build_revision(self, url, ud, d):
|
||||||
|
return ud.manifest
|
||||||
|
|
||||||
|
def _want_sortable_revision(self, url, ud, d):
|
||||||
|
return False
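# Hedged usage sketch (manifest, branch and host are assumed examples): the commands
# the go() method above runs for a fresh checkout before tarring up the tree.
#   repo init -m default.xml -b master -u git://source.example.com/platform/manifest
#   repo sync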
|
|
@@ -0,0 +1,118 @@
|
||||||
|
# ex:ts=4:sw=4:sts=4:et
|
||||||
|
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||||
|
'''
|
||||||
|
BitBake 'Fetch' implementations
|
||||||
|
|
||||||
|
This implementation is for Secure Shell (SSH), and attempts to comply with the
|
||||||
|
IETF secsh internet draft:
|
||||||
|
http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
|
||||||
|
|
||||||
|
Currently does not support the sftp parameters, as this uses scp
|
||||||
|
Also does not support the 'fingerprint' connection parameter.
|
||||||
|
|
||||||
|
'''
|
||||||
|
|
||||||
|
# Copyright (C) 2006 OpenedHand Ltd.
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# Based in part on svk.py:
|
||||||
|
# Copyright (C) 2006 Holger Hans Peter Freyther
|
||||||
|
# Based on svn.py:
|
||||||
|
# Copyright (C) 2003, 2004 Chris Larson
|
||||||
|
# Based on functions from the base bb module:
|
||||||
|
# Copyright 2003 Holger Schurig
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# This program is free software; you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License version 2 as
|
||||||
|
# published by the Free Software Foundation.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License along
|
||||||
|
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||||
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||||
|
|
||||||
|
import re, os
|
||||||
|
from bb import data
|
||||||
|
from bb.fetch import Fetch
|
||||||
|
from bb.fetch import FetchError
|
||||||
|
|
||||||
|
|
||||||
|
__pattern__ = re.compile(r'''
|
||||||
|
\s* # Skip leading whitespace
|
||||||
|
ssh:// # scheme
|
||||||
|
( # Optional username/password block
|
||||||
|
(?P<user>\S+) # username
|
||||||
|
(:(?P<pass>\S+))? # colon followed by the password (optional)
|
||||||
|
)?
|
||||||
|
(?P<cparam>(;[^;]+)*)? # connection parameters block (optional)
|
||||||
|
@
|
||||||
|
(?P<host>\S+?) # non-greedy match of the host
|
||||||
|
(:(?P<port>[0-9]+))? # colon followed by the port (optional)
|
||||||
|
/
|
||||||
|
(?P<path>[^;]+) # path on the remote system, may be absolute or relative,
|
||||||
|
# and may include the use of '~' to reference the remote home
|
||||||
|
# directory
|
||||||
|
(?P<sparam>(;[^;]+)*)? # parameters block (optional)
|
||||||
|
$
|
||||||
|
''', re.VERBOSE)
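# Hedged, illustrative example (the URL is an assumption, not taken from the source):
# the named groups the pattern above extracts from a typical ssh URL.
_example_m = __pattern__.match("ssh://builder@build.example.com:2222/~/srcs/tree.tar.gz")
# _example_m.group('user') == 'builder'
# _example_m.group('host') == 'build.example.com'
# _example_m.group('port') == '2222'
# _example_m.group('path') == '~/srcs/tree.tar.gz'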
|
||||||
|
|
||||||
|
class SSH(Fetch):
|
||||||
|
'''Class to fetch a module or modules via Secure Shell'''
|
||||||
|
|
||||||
|
def supports(self, url, urldata, d):
|
||||||
|
return __pattern__.match(url) is not None
|
||||||
|
|
||||||
|
def localpath(self, url, urldata, d):
|
||||||
|
m = __pattern__.match(url)
|
||||||
|
path = m.group('path')
|
||||||
|
host = m.group('host')
|
||||||
|
lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path))
|
||||||
|
return lpath
|
||||||
|
|
||||||
|
def go(self, url, urldata, d):
|
||||||
|
dldir = data.getVar('DL_DIR', d, 1)
|
||||||
|
|
||||||
|
m = __pattern__.match(url)
|
||||||
|
path = m.group('path')
|
||||||
|
host = m.group('host')
|
||||||
|
port = m.group('port')
|
||||||
|
user = m.group('user')
|
||||||
|
password = m.group('pass')
|
||||||
|
|
||||||
|
ldir = os.path.join(dldir, host)
|
||||||
|
lpath = os.path.join(ldir, os.path.basename(path))
|
||||||
|
|
||||||
|
if not os.path.exists(ldir):
|
||||||
|
os.makedirs(ldir)
|
||||||
|
|
||||||
|
if port:
|
||||||
|
port = '-P %s' % port
|
||||||
|
else:
|
||||||
|
port = ''
|
||||||
|
|
||||||
|
if user:
|
||||||
|
fr = user
|
||||||
|
if password:
|
||||||
|
fr += ':%s' % password
|
||||||
|
fr += '@%s' % host
|
||||||
|
else:
|
||||||
|
fr = host
|
||||||
|
fr += ':%s' % path
|
||||||
|
|
||||||
|
|
||||||
|
import commands
|
||||||
|
cmd = 'scp -B -r %s %s %s/' % (
|
||||||
|
port,
|
||||||
|
commands.mkarg(fr),
|
||||||
|
commands.mkarg(ldir)
|
||||||
|
)
|
||||||
|
|
||||||
|
(exitstatus, output) = commands.getstatusoutput(cmd)
|
||||||
|
if exitstatus != 0:
|
||||||
|
print(output)
|
||||||
|
raise FetchError('Unable to fetch %s' % url)
|
|
@@ -0,0 +1,104 @@
|
||||||
|
# ex:ts=4:sw=4:sts=4:et
|
||||||
|
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||||
|
"""
|
||||||
|
BitBake 'Fetch' implementations
|
||||||
|
|
||||||
|
This implementation is for svk. It is based on the svn implementation
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Copyright (C) 2006 Holger Hans Peter Freyther
|
||||||
|
# Copyright (C) 2003, 2004 Chris Larson
|
||||||
|
#
|
||||||
|
# This program is free software; you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License version 2 as
|
||||||
|
# published by the Free Software Foundation.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License along
|
||||||
|
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||||
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||||
|
#
|
||||||
|
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||||
|
|
||||||
|
import os
|
||||||
|
import logging
|
||||||
|
import bb
|
||||||
|
from bb import data
|
||||||
|
from bb.fetch import Fetch
|
||||||
|
from bb.fetch import FetchError
|
||||||
|
from bb.fetch import MissingParameterError
|
||||||
|
from bb.fetch import logger
|
||||||
|
|
||||||
|
class Svk(Fetch):
|
||||||
|
"""Class to fetch a module or modules from svk repositories"""
|
||||||
|
def supports(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
Check to see if a given url can be fetched with svk.
|
||||||
|
"""
|
||||||
|
return ud.type in ['svk']
|
||||||
|
|
||||||
|
def localpath(self, url, ud, d):
|
||||||
|
if not "module" in ud.parm:
|
||||||
|
raise MissingParameterError("svk method needs a 'module' parameter")
|
||||||
|
else:
|
||||||
|
ud.module = ud.parm["module"]
|
||||||
|
|
||||||
|
ud.revision = ud.parm.get('rev', "")
|
||||||
|
|
||||||
|
ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
|
||||||
|
|
||||||
|
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
|
||||||
|
|
||||||
|
def forcefetch(self, url, ud, d):
|
||||||
|
return ud.date == "now"
|
||||||
|
|
||||||
|
def go(self, loc, ud, d):
|
||||||
|
"""Fetch urls"""
|
||||||
|
|
||||||
|
svkroot = ud.host + ud.path
|
||||||
|
|
||||||
|
svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)
|
||||||
|
|
||||||
|
if ud.revision:
|
||||||
|
svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)
|
||||||
|
|
||||||
|
# create temp directory
|
||||||
|
localdata = data.createCopy(d)
|
||||||
|
data.update_data(localdata)
|
||||||
|
logger.debug(2, "Fetch: creating temporary directory")
|
||||||
|
bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
|
||||||
|
data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
|
||||||
|
tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
|
||||||
|
tmpfile = tmppipe.readline().strip()
|
||||||
|
if not tmpfile:
|
||||||
|
logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
|
||||||
|
raise FetchError(ud.module)
|
||||||
|
|
||||||
|
# check out sources there
|
||||||
|
os.chdir(tmpfile)
|
||||||
|
logger.info("Fetch " + loc)
|
||||||
|
logger.debug(1, "Running %s", svkcmd)
|
||||||
|
myret = os.system(svkcmd)
|
||||||
|
if myret != 0:
|
||||||
|
try:
|
||||||
|
os.rmdir(tmpfile)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
raise FetchError(ud.module)
|
||||||
|
|
||||||
|
os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
|
||||||
|
# tar them up to a defined filename
|
||||||
|
myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
|
||||||
|
if myret != 0:
|
||||||
|
try:
|
||||||
|
os.unlink(ud.localpath)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
raise FetchError(ud.module)
|
||||||
|
# cleanup
|
||||||
|
bb.utils.prunedir(tmpfile)
|
|
@@ -0,0 +1,204 @@
|
||||||
|
# ex:ts=4:sw=4:sts=4:et
|
||||||
|
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
|
||||||
|
"""
|
||||||
|
BitBake 'Fetch' implementation for svn.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Copyright (C) 2003, 2004 Chris Larson
|
||||||
|
# Copyright (C) 2004 Marcin Juszkiewicz
|
||||||
|
#
|
||||||
|
# This program is free software; you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License version 2 as
|
||||||
|
# published by the Free Software Foundation.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License along
|
||||||
|
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||||
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||||
|
#
|
||||||
|
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
import bb
|
||||||
|
from bb import data
|
||||||
|
from bb.fetch import Fetch
|
||||||
|
from bb.fetch import FetchError
|
||||||
|
from bb.fetch import MissingParameterError
|
||||||
|
from bb.fetch import runfetchcmd
|
||||||
|
from bb.fetch import logger
|
||||||
|
|
||||||
|
class Svn(Fetch):
|
||||||
|
"""Class to fetch a module or modules from svn repositories"""
|
||||||
|
def supports(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
Check to see if a given url can be fetched with svn.
|
||||||
|
"""
|
||||||
|
return ud.type in ['svn']
|
||||||
|
|
||||||
|
def localpath(self, url, ud, d):
|
||||||
|
if not "module" in ud.parm:
|
||||||
|
raise MissingParameterError("svn method needs a 'module' parameter")
|
||||||
|
|
||||||
|
ud.module = ud.parm["module"]
|
||||||
|
|
||||||
|
# Create paths to svn checkouts
|
||||||
|
relpath = self._strip_leading_slashes(ud.path)
|
||||||
|
ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
|
||||||
|
ud.moddir = os.path.join(ud.pkgdir, ud.module)
|
||||||
|
|
||||||
|
if 'rev' in ud.parm:
|
||||||
|
ud.date = ""
|
||||||
|
ud.revision = ud.parm['rev']
|
||||||
|
elif 'date' in ud.parm:
|
||||||
|
ud.date = ud.parm['date']
|
||||||
|
ud.revision = ""
|
||||||
|
else:
|
||||||
|
#
|
||||||
|
# ***Nasty hack***
|
||||||
|
# If DATE in unexpanded PV, use ud.date (which is set from SRCDATE)
|
||||||
|
# Should warn people to switch to SRCREV here
|
||||||
|
#
|
||||||
|
pv = data.getVar("PV", d, 0)
|
||||||
|
if "DATE" in pv:
|
||||||
|
ud.revision = ""
|
||||||
|
else:
|
||||||
|
rev = Fetch.srcrev_internal_helper(ud, d)
|
||||||
|
if rev is True:
|
||||||
|
ud.revision = self.latest_revision(url, ud, d)
|
||||||
|
ud.date = ""
|
||||||
|
elif rev:
|
||||||
|
ud.revision = rev
|
||||||
|
ud.date = ""
|
||||||
|
else:
|
||||||
|
ud.revision = ""
|
||||||
|
|
||||||
|
ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
|
||||||
|
|
||||||
|
return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
|
||||||
|
|
||||||
|
def _buildsvncommand(self, ud, d, command):
|
||||||
|
"""
|
||||||
|
Build up an svn commandline based on ud
|
||||||
|
command is "fetch", "update", "info"
|
||||||
|
"""
|
||||||
|
|
||||||
|
basecmd = data.expand('${FETCHCMD_svn}', d)
|
||||||
|
|
||||||
|
proto = ud.parm.get('proto', 'svn')
|
||||||
|
|
||||||
|
svn_rsh = None
|
||||||
|
if proto == "svn+ssh" and "rsh" in ud.parm:
|
||||||
|
svn_rsh = ud.parm["rsh"]
|
||||||
|
|
||||||
|
svnroot = ud.host + ud.path
|
||||||
|
|
||||||
|
# either use the revision, or SRCDATE in braces,
|
||||||
|
options = []
|
||||||
|
|
||||||
|
if ud.user:
|
||||||
|
options.append("--username %s" % ud.user)
|
||||||
|
|
||||||
|
if ud.pswd:
|
||||||
|
options.append("--password %s" % ud.pswd)
|
||||||
|
|
||||||
|
if command is "info":
|
||||||
|
svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module)
|
||||||
|
else:
|
||||||
|
suffix = ""
|
||||||
|
if ud.revision:
|
||||||
|
options.append("-r %s" % ud.revision)
|
||||||
|
suffix = "@%s" % (ud.revision)
|
||||||
|
elif ud.date:
|
||||||
|
options.append("-r {%s}" % ud.date)
|
||||||
|
|
||||||
|
if command is "fetch":
|
||||||
|
svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
|
||||||
|
elif command is "update":
|
||||||
|
svncmd = "%s update %s" % (basecmd, " ".join(options))
|
||||||
|
else:
|
||||||
|
raise FetchError("Invalid svn command %s" % command)
|
||||||
|
|
||||||
|
if svn_rsh:
|
||||||
|
svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
|
||||||
|
|
||||||
|
return svncmd
|
||||||
|
|
||||||
|
def go(self, loc, ud, d):
|
||||||
|
"""Fetch url"""
|
||||||
|
|
||||||
|
logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
|
||||||
|
|
||||||
|
if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
|
||||||
|
svnupdatecmd = self._buildsvncommand(ud, d, "update")
|
||||||
|
logger.info("Update " + loc)
|
||||||
|
# update sources there
|
||||||
|
os.chdir(ud.moddir)
|
||||||
|
logger.debug(1, "Running %s", svnupdatecmd)
|
||||||
|
runfetchcmd(svnupdatecmd, d)
|
||||||
|
else:
|
||||||
|
svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
|
||||||
|
logger.info("Fetch " + loc)
|
||||||
|
# check out sources there
|
||||||
|
bb.utils.mkdirhier(ud.pkgdir)
|
||||||
|
os.chdir(ud.pkgdir)
|
||||||
|
logger.debug(1, "Running %s", svnfetchcmd)
|
||||||
|
runfetchcmd(svnfetchcmd, d)
|
||||||
|
|
||||||
|
scmdata = ud.parm.get("scmdata", "")
|
||||||
|
if scmdata == "keep":
|
||||||
|
tar_flags = ""
|
||||||
|
else:
|
||||||
|
tar_flags = "--exclude '.svn'"
|
||||||
|
|
||||||
|
os.chdir(ud.pkgdir)
|
||||||
|
# tar them up to a defined filename
|
||||||
|
try:
|
||||||
|
runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d)
|
||||||
|
except:
|
||||||
|
t, v, tb = sys.exc_info()
|
||||||
|
try:
|
||||||
|
os.unlink(ud.localpath)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
raise t, v, tb
|
||||||
|
|
||||||
|
def supports_srcrev(self):
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _revision_key(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
Return a unique key for the url
|
||||||
|
"""
|
||||||
|
return "svn:" + ud.moddir
|
||||||
|
|
||||||
|
def _latest_revision(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
Return the latest upstream revision number
|
||||||
|
"""
|
||||||
|
logger.debug(2, "SVN fetcher hitting network for %s", url)
|
||||||
|
|
||||||
|
output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)
|
||||||
|
|
||||||
|
revision = None
|
||||||
|
for line in output.splitlines():
|
||||||
|
if "Last Changed Rev" in line:
|
||||||
|
revision = line.split(":")[1].strip()
|
||||||
|
|
||||||
|
return revision
|
||||||
|
|
||||||
|
def _sortable_revision(self, url, ud, d):
|
||||||
|
"""
|
||||||
|
Return a sortable revision number which in our case is the revision number
|
||||||
|
"""
|
||||||
|
|
||||||
|
return self._build_revision(url, ud, d)
|
||||||
|
|
||||||
|
def _build_revision(self, url, ud, d):
|
||||||
|
return ud.revision
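# Hedged sketch (revision, repository root and module are assumed examples): the
# shape of the checkout command _buildsvncommand() above builds for "fetch".
_example_co = "%s co %s %s://%s/%s%s %s" % ("svn", "-r 1234", "svn", "svn.example.com/repos", "trunk", "@1234", "trunk")
# _example_co == 'svn co -r 1234 svn://svn.example.com/repos/trunk@1234 trunk'
# "update" becomes 'svn update -r 1234' and "info" runs 'svn info' on the module URL.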
|
|
@@ -0,0 +1,93 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementations

Classes for obtaining upstream sources for the
BitBake build tools.

"""

# Copyright (C) 2003, 2004  Chris Larson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import logging
import bb
import urllib
from bb import data
from bb.fetch import Fetch, FetchError, encodeurl, decodeurl, logger, runfetchcmd

class Wget(Fetch):
    """Class to fetch urls via 'wget'"""
    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with wget.
        """
        return ud.type in ['http', 'https', 'ftp']

    def localpath(self, url, ud, d):

        url = encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}])
        ud.basename = os.path.basename(ud.path)
        ud.localfile = data.expand(urllib.unquote(ud.basename), d)

        return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)

    def go(self, uri, ud, d, checkonly = False):
        """Fetch urls"""

        def fetch_uri(uri, ud, d):
            if checkonly:
                fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
            elif os.path.exists(ud.localpath):
                # file exists, but we didnt complete it.. trying again..
                fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
            else:
                fetchcmd = data.getVar("FETCHCOMMAND", d, 1)

            uri = uri.split(";")[0]
            uri_decoded = list(decodeurl(uri))
            uri_type = uri_decoded[0]
            uri_host = uri_decoded[1]

            fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
            fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
            logger.info("fetch " + uri)
            logger.debug(2, "executing " + fetchcmd)
            runfetchcmd(fetchcmd, d)

            # Sanity check since wget can pretend it succeed when it didn't
            # Also, this used to happen if sourceforge sent us to the mirror page
            if not os.path.exists(ud.localpath) and not checkonly:
                logger.debug(2, "The fetch command for %s returned success but %s doesn't exist?...", uri, ud.localpath)
                return False

            return True

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        if fetch_uri(uri, ud, localdata):
            return True

        raise FetchError(uri)


    def checkstatus(self, uri, ud, d):
        return self.go(uri, ud, d, True)
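For illustration only (not part of the diff): the ${URI}/${FILE} substitution that fetch_uri() above applies to FETCHCOMMAND. The command template and URL below are made-up examples of a typical wget-based setting, not values taken from this repository:

    # Hypothetical FETCHCOMMAND template and SRC_URI entry.
    fetchcmd = "wget -t 5 --passive-ftp -P ${DL_DIR} ${URI}"
    uri = "http://downloads.example.org/pkg-1.0.tar.gz;name=pkg"
    basename = "pkg-1.0.tar.gz"

    # Same replacements as in fetch_uri(): URL parameters are stripped first.
    fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
    fetchcmd = fetchcmd.replace("${FILE}", basename)
    print(fetchcmd)
    # wget -t 5 --passive-ftp -P ${DL_DIR} http://downloads.example.org/pkg-1.0.tar.gz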
File diff suppressed because it is too large
@ -27,13 +27,14 @@ import os
import sys
import logging
import bb
from bb import data
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger

class Bzr(FetchMethod):
    def supports(self, ud, d):
    def supports(self, url, ud, d):
        return ud.type in ['bzr']

    def urldata_init(self, ud, d):
@ -42,14 +43,14 @@ class Bzr(FetchMethod):
        """
        # Create paths to bzr checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(d.expand('${BZRDIR}'), ud.host, relpath)
        ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)

        ud.setup_revisions(d)
        ud.setup_revisons(d)

        if not ud.revision:
            ud.revision = self.latest_revision(ud, d)
            ud.revision = self.latest_revision(ud.url, ud, d)

        ud.localfile = d.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision))
        ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)

    def _buildbzrcommand(self, ud, d, command):
        """
@ -57,9 +58,9 @@ class Bzr(FetchMethod):
        command is "fetch", "update", "revno"
        """

        basecmd = d.expand('${FETCHCMD_bzr}')
        basecmd = data.expand('${FETCHCMD_bzr}', d)

        proto = ud.parm.get('protocol', 'http')
        proto = ud.parm.get('proto', 'http')

        bzrroot = ud.host + ud.path

@ -72,7 +73,7 @@ class Bzr(FetchMethod):
            options.append("-r %s" % ud.revision)

        if command == "fetch":
            bzrcmd = "%s branch %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
            bzrcmd = "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
        elif command == "update":
            bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
        else:
@ -80,47 +81,50 @@ class Bzr(FetchMethod):

        return bzrcmd

    def download(self, ud, d):
    def download(self, loc, ud, d):
        """Fetch url"""

        if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
            bzrcmd = self._buildbzrcommand(ud, d, "update")
            logger.debug(1, "BZR Update %s", ud.url)
            logger.debug(1, "BZR Update %s", loc)
            bb.fetch2.check_network_access(d, bzrcmd, ud.url)
            runfetchcmd(bzrcmd, d, workdir=os.path.join(ud.pkgdir, os.path.basename(ud.path)))
            os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
            runfetchcmd(bzrcmd, d)
        else:
            bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
            bzrcmd = self._buildbzrcommand(ud, d, "fetch")
            bb.fetch2.check_network_access(d, bzrcmd, ud.url)
            logger.debug(1, "BZR Checkout %s", ud.url)
            logger.debug(1, "BZR Checkout %s", loc)
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", bzrcmd)
            runfetchcmd(bzrcmd, d, workdir=ud.pkgdir)
            runfetchcmd(bzrcmd, d)

        os.chdir(ud.pkgdir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude='.bzr' --exclude='.bzrtags'"
            tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"

        # tar them up to a defined filename
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)),
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath])
                    d, cleanup=[ud.localpath], workdir=ud.pkgdir)

    def supports_srcrev(self):
        return True

    def _revision_key(self, ud, d, name):
    def _revision_key(self, url, ud, d, name):
        """
        Return a unique key for the url
        """
        return "bzr:" + ud.pkgdir

    def _latest_revision(self, ud, d, name):
    def _latest_revision(self, url, ud, d, name):
        """
        Return the latest upstream revision number
        """
        logger.debug(2, "BZR fetcher hitting network for %s", ud.url)
        logger.debug(2, "BZR fetcher hitting network for %s", url)

        bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)

@ -128,12 +132,12 @@ class Bzr(FetchMethod):

        return output.strip()

    def sortable_revision(self, ud, d, name):
    def _sortable_revision(self, url, ud, d):
        """
        Return a sortable revision number which in our case is the revision number
        """

        return False, self._build_revision(ud, d)
        return self._build_revision(url, ud, d)

    def _build_revision(self, ud, d):
    def _build_revision(self, url, ud, d):
        return ud.revision
@ -1,259 +0,0 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' clearcase implementation

The clearcase fetcher is used to retrieve files from a ClearCase repository.

Usage in the recipe:

SRC_URI = "ccrc://cc.example.org/ccrc;vob=/example_vob;module=/example_module"
SRCREV = "EXAMPLE_CLEARCASE_TAG"
PV = "${@d.getVar("SRCREV", False).replace("/", "+")}"

The fetcher uses the rcleartool or cleartool remote client, depending on which one is available.

Supported SRC_URI options are:

- vob
    (required) The name of the clearcase VOB (with prepending "/")

- module
    The module in the selected VOB (with prepending "/")

    The module and vob parameters are combined to create
    the following load rule in the view config spec:
    load <vob><module>

- proto
    http or https

Related variables:

CCASE_CUSTOM_CONFIG_SPEC
    Write a config spec to this variable in your recipe to use it instead
    of the default config spec generated by this fetcher.
    Please note that the SRCREV loses its functionality if you specify
    this variable. SRCREV is still used to label the archive after a fetch,
    but it doesn't define what's fetched.

User credentials:
    cleartool:
        The login of cleartool is handled by the system. No special steps needed.

    rcleartool:
        In order to use rcleartool with authenticated users an `rcleartool login` is
        necessary before using the fetcher.
"""
# Copyright (C) 2014 Siemens AG
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

import os
import sys
import shutil
import bb
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger

class ClearCase(FetchMethod):
    """Class to fetch urls via 'clearcase'"""
    def init(self, d):
        pass

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with Clearcase.
        """
        return ud.type in ['ccrc']

    def debug(self, msg):
        logger.debug(1, "ClearCase: %s", msg)

    def urldata_init(self, ud, d):
        """
        init ClearCase specific variable within url data
        """
        ud.proto = "https"
        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        if not ud.proto in ('http', 'https'):
            raise fetch2.ParameterError("Invalid protocol type", ud.url)

        ud.vob = ''
        if 'vob' in ud.parm:
            ud.vob = ud.parm['vob']
        else:
            msg = ud.url+": vob must be defined so the fetcher knows what to get."
            raise MissingParameterError('vob', msg)

        if 'module' in ud.parm:
            ud.module = ud.parm['module']
        else:
            ud.module = ""

        ud.basecmd = d.getVar("FETCHCMD_ccrc") or "/usr/bin/env cleartool || rcleartool"

        if d.getVar("SRCREV") == "INVALID":
            raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. SRCREV = \"/main/LATEST\" or any other label of your choice.")

        ud.label = d.getVar("SRCREV", False)
        ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC")

        ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path)

        ud.identifier = "clearcase-%s%s-%s" % ( ud.vob.replace("/", ""),
                                                ud.module.replace("/", "."),
                                                ud.label.replace("/", "."))

        ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True))
        ud.csname = "%s-config-spec" % (ud.identifier)
        ud.ccasedir = os.path.join(d.getVar("DL_DIR"), ud.type)
        ud.viewdir = os.path.join(ud.ccasedir, ud.viewname)
        ud.configspecfile = os.path.join(ud.ccasedir, ud.csname)
        ud.localfile = "%s.tar.gz" % (ud.identifier)

        self.debug("host            = %s" % ud.host)
        self.debug("path            = %s" % ud.path)
        self.debug("server          = %s" % ud.server)
        self.debug("proto           = %s" % ud.proto)
        self.debug("type            = %s" % ud.type)
        self.debug("vob             = %s" % ud.vob)
        self.debug("module          = %s" % ud.module)
        self.debug("basecmd         = %s" % ud.basecmd)
        self.debug("label           = %s" % ud.label)
        self.debug("ccasedir        = %s" % ud.ccasedir)
        self.debug("viewdir         = %s" % ud.viewdir)
        self.debug("viewname        = %s" % ud.viewname)
        self.debug("configspecfile  = %s" % ud.configspecfile)
        self.debug("localfile       = %s" % ud.localfile)

        ud.localfile = os.path.join(d.getVar("DL_DIR"), ud.localfile)

    def _build_ccase_command(self, ud, command):
        """
        Build up a commandline based on ud
        command is: mkview, setcs, rmview
        """
        options = []

        if "rcleartool" in ud.basecmd:
            options.append("-server %s" % ud.server)

        basecmd = "%s %s" % (ud.basecmd, command)

        if command is 'mkview':
            if not "rcleartool" in ud.basecmd:
                # Cleartool needs a -snapshot view
                options.append("-snapshot")
            options.append("-tag %s" % ud.viewname)
            options.append(ud.viewdir)

        elif command is 'rmview':
            options.append("-force")
            options.append("%s" % ud.viewdir)

        elif command is 'setcs':
            options.append("-overwrite")
            options.append(ud.configspecfile)

        else:
            raise FetchError("Invalid ccase command %s" % command)

        ccasecmd = "%s %s" % (basecmd, " ".join(options))
        self.debug("ccasecmd = %s" % ccasecmd)
        return ccasecmd

    def _write_configspec(self, ud, d):
        """
        Create config spec file (ud.configspecfile) for ccase view
        """
        config_spec = ""
        custom_config_spec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", d)
        if custom_config_spec is not None:
            for line in custom_config_spec.split("\\n"):
                config_spec += line+"\n"
            bb.warn("A custom config spec has been set, SRCREV is only relevant for the tarball name.")
        else:
            config_spec += "element * CHECKEDOUT\n"
            config_spec += "element * %s\n" % ud.label
            config_spec += "load %s%s\n" % (ud.vob, ud.module)

        logger.info("Using config spec: \n%s" % config_spec)

        with open(ud.configspecfile, 'w') as f:
            f.write(config_spec)

    def _remove_view(self, ud, d):
        if os.path.exists(ud.viewdir):
            cmd = self._build_ccase_command(ud, 'rmview');
            logger.info("cleaning up [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
            bb.fetch2.check_network_access(d, cmd, ud.url)
            output = runfetchcmd(cmd, d, workdir=ud.ccasedir)
            logger.info("rmview output: %s", output)

    def need_update(self, ud, d):
        if ("LATEST" in ud.label) or (ud.customspec and "LATEST" in ud.customspec):
            ud.identifier += "-%s" % d.getVar("DATETIME",d, True)
            return True
        if os.path.exists(ud.localpath):
            return False
        return True

    def supports_srcrev(self):
        return True

    def sortable_revision(self, ud, d, name):
        return False, ud.identifier

    def download(self, ud, d):
        """Fetch url"""

        # Make a fresh view
        bb.utils.mkdirhier(ud.ccasedir)
        self._write_configspec(ud, d)
        cmd = self._build_ccase_command(ud, 'mkview')
        logger.info("creating view [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        try:
            runfetchcmd(cmd, d)
        except FetchError as e:
            if "CRCLI2008E" in e.msg:
                raise FetchError("%s\n%s\n" % (e.msg, "Call `rcleartool login` in your console to authenticate to the clearcase server before running bitbake."))
            else:
                raise e

        # Set configspec: Setting the configspec effectively fetches the files as defined in the configspec
        cmd = self._build_ccase_command(ud, 'setcs');
        logger.info("fetching data [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        output = runfetchcmd(cmd, d, workdir=ud.viewdir)
        logger.info("%s", output)

        # Copy the configspec to the viewdir so we have it in our source tarball later
        shutil.copyfile(ud.configspecfile, os.path.join(ud.viewdir, ud.csname))

        # Clean clearcase meta-data before tar

        runfetchcmd('tar -czf "%s" .' % (ud.localpath), d, cleanup = [ud.localpath])

        # Clean up so we can create a new view next time
        self.clean(ud, d);

    def clean(self, ud, d):
        self._remove_view(ud, d)
        bb.utils.remove(ud.configspecfile)
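For illustration only (not part of the diff): the default config spec that _write_configspec() above generates when CCASE_CUSTOM_CONFIG_SPEC is unset, using the same example vob/module/label values as the docstring:

    vob, module, label = "/example_vob", "/example_module", "EXAMPLE_CLEARCASE_TAG"

    config_spec = ""
    config_spec += "element * CHECKEDOUT\n"
    config_spec += "element * %s\n" % label
    config_spec += "load %s%s\n" % (vob, module)
    print(config_spec)
    # element * CHECKEDOUT
    # element * EXAMPLE_CLEARCASE_TAG
    # load /example_vob/example_module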
@ -29,6 +29,7 @@ BitBake build tools.
import os
import logging
import bb
from bb import data
from bb.fetch2 import FetchMethod, FetchError, MissingParameterError, logger
from bb.fetch2 import runfetchcmd

@ -36,7 +37,7 @@ class Cvs(FetchMethod):
    """
    Class to fetch a module or modules from cvs repositories
    """
    def supports(self, ud, d):
    def supports(self, url, ud, d):
        """
        Check to see if a given url can be fetched with cvs.
        """
@ -63,16 +64,16 @@ class Cvs(FetchMethod):
        if 'fullpath' in ud.parm:
            fullpath = '_fullpath'

        ud.localfile = d.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath))
        ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)

    def need_update(self, ud, d):
    def need_update(self, url, ud, d):
        if (ud.date == "now"):
            return True
        if not os.path.exists(ud.localpath):
            return True
        return False

    def download(self, ud, d):
    def download(self, loc, ud, d):

        method = ud.parm.get('method', 'pserver')
        localdir = ud.parm.get('localdir', ud.module)
@ -87,10 +88,10 @@ class Cvs(FetchMethod):
            cvsroot = ud.path
        else:
            cvsroot = ":" + method
            cvsproxyhost = d.getVar('CVS_PROXY_HOST')
            cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
            if cvsproxyhost:
                cvsroot += ";proxy=" + cvsproxyhost
            cvsproxyport = d.getVar('CVS_PROXY_PORT')
            cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
            if cvsproxyport:
                cvsroot += ";proxyport=" + cvsproxyport
            cvsroot += ":" + ud.user
@ -110,9 +111,15 @@ class Cvs(FetchMethod):
        if ud.tag:
            options.append("-r %s" % ud.tag)

        cvsbasecmd = d.getVar("FETCHCMD_cvs")
        localdata = data.createCopy(d)
        cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module
        data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
        cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options)
        data.update_data(localdata)

        data.setVar('CVSROOT', cvsroot, localdata)
        data.setVar('CVSCOOPTS', " ".join(options), localdata)
        data.setVar('CVSMODULE', ud.module, localdata)
        cvscmd = data.getVar('FETCHCOMMAND', localdata, True)
        cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, True)

        if cvs_rsh:
            cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
@ -120,26 +127,25 @@ class Cvs(FetchMethod):

        # create module directory
        logger.debug(2, "Fetch: checking for module directory")
        pkg = d.getVar('PN')
        pkg = data.expand('${PN}', d)
        pkgdir = os.path.join(d.getVar('CVSDIR'), pkg)
        pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
        moddir = os.path.join(pkgdir, localdir)
        workdir = None
        if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
            logger.info("Update " + ud.url)
            logger.info("Update " + loc)
            bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
            # update sources there
            workdir = moddir
            os.chdir(moddir)
            cmd = cvsupdatecmd
        else:
            logger.info("Fetch " + ud.url)
            logger.info("Fetch " + loc)
            # check out sources there
            bb.utils.mkdirhier(pkgdir)
            workdir = pkgdir
            os.chdir(pkgdir)
            logger.debug(1, "Running %s", cvscmd)
            bb.fetch2.check_network_access(d, cvscmd, ud.url)
            cmd = cvscmd

        runfetchcmd(cmd, d, cleanup=[moddir], workdir=workdir)
        runfetchcmd(cmd, d, cleanup = [moddir])

        if not os.access(moddir, os.R_OK):
            raise FetchError("Directory %s was not readable despite sucessful fetch?!" % moddir, ud.url)
@ -148,24 +154,27 @@ class Cvs(FetchMethod):
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude='CVS'"
            tar_flags = "--exclude 'CVS'"

        # tar them up to a defined filename
        workdir = None
        if 'fullpath' in ud.parm:
            workdir = pkgdir
            os.chdir(pkgdir)
            cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
        else:
            workdir = os.path.dirname(os.path.realpath(moddir))
            os.chdir(moddir)
            os.chdir('..')
            cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))

        runfetchcmd(cmd, d, cleanup=[ud.localpath], workdir=workdir)
        runfetchcmd(cmd, d, cleanup = [ud.localpath])

    def clean(self, ud, d):
        """ Clean CVS Files and tarballs """

        pkg = d.getVar('PN')
        pkg = data.expand('${PN}', d)
        pkgdir = os.path.join(d.getVar("CVSDIR"), pkg)
        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)
        pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)

        bb.utils.remove(pkgdir, True)
        bb.utils.remove(ud.localpath)
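For illustration only (not part of the diff): how download() above starts assembling the CVSROOT string. Host and path handling continues in lines not shown in this hunk; the values below are made up:

    method = "pserver"
    cvsproxyhost = "proxy.example.org"
    cvsproxyport = "3128"
    user = "anonymous"

    cvsroot = ":" + method
    if cvsproxyhost:
        cvsroot += ";proxy=" + cvsproxyhost
    if cvsproxyport:
        cvsroot += ";proxyport=" + cvsproxyport
    cvsroot += ":" + user
    print(cvsroot)  # :pserver;proxy=proxy.example.org;proxyport=3128:anonymous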
@ -11,8 +11,8 @@ Supported SRC_URI options are:
- branch
   The git branch to retrieve from. The default is "master"

   This option also supports multiple branch fetching, with branches
   this option also support multiple branches fetching, branches
   separated by commas. In multiple branches case, the name option
   are seperated by comma. in multiple branches case, the name option
   must have the same number of names to match the branches, which is
   used to specify the SRC_REV for the branch
   e.g:
@ -25,34 +25,19 @@ Supported SRC_URI options are:

- protocol
   The method to use to access the repository. Common options are "git",
   "http", "https", "file", "ssh" and "rsync". The default is "https".
   "http", "file" and "rsync". The default is "git"

- rebaseable
   rebaseable indicates that the upstream git repo may rebase in the future,
   and current revision may disappear from upstream repo. This option will
   remind fetcher to preserve local cache carefully for future use.
   reminder fetcher to preserve local cache carefully for future use.
   The default value is "0", set rebaseable=1 for rebaseable git repo.
   The default value is "0", set rebaseable=1 for rebaseable git repo

- nocheckout
   Don't checkout source code when unpacking. set this option for the recipe
   who has its own routine to checkout code.
   The default is "0", set nocheckout=1 if needed.

- bareclone
   Create a bare clone of the source code and don't checkout the source code
   when unpacking. Set this option for the recipe who has its own routine to
   checkout code and tracking branch requirements.
   The default is "0", set bareclone=1 if needed.

- nobranch
   Don't check the SHA validation for branch. set this option for the recipe
   referring to commit which is valid in tag instead of branch.
   The default is "0", set nobranch=1 if needed.

- usehead
   For local git:// urls to use the current branch HEAD as the revision for use with
   AUTOREV. Implies nobranch.

"""

#Copyright (C) 2005 Richard Purdie
@ -70,71 +55,27 @@ Supported SRC_URI options are:
|
||||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||||
|
|
||||||
import errno
|
|
||||||
import os
|
import os
|
||||||
import re
|
|
||||||
import bb
|
import bb
|
||||||
import errno
|
from bb import data
|
||||||
import bb.progress
|
|
||||||
from bb.fetch2 import FetchMethod
|
from bb.fetch2 import FetchMethod
|
||||||
from bb.fetch2 import runfetchcmd
|
from bb.fetch2 import runfetchcmd
|
||||||
from bb.fetch2 import logger
|
from bb.fetch2 import logger
|
||||||
|
|
||||||
|
|
||||||
class GitProgressHandler(bb.progress.LineFilterProgressHandler):
|
|
||||||
"""Extract progress information from git output"""
|
|
||||||
def __init__(self, d):
|
|
||||||
self._buffer = ''
|
|
||||||
self._count = 0
|
|
||||||
super(GitProgressHandler, self).__init__(d)
|
|
||||||
# Send an initial progress event so the bar gets shown
|
|
||||||
self._fire_progress(-1)
|
|
||||||
|
|
||||||
def write(self, string):
|
|
||||||
self._buffer += string
|
|
||||||
stages = ['Counting objects', 'Compressing objects', 'Receiving objects', 'Resolving deltas']
|
|
||||||
stage_weights = [0.2, 0.05, 0.5, 0.25]
|
|
||||||
stagenum = 0
|
|
||||||
for i, stage in reversed(list(enumerate(stages))):
|
|
||||||
if stage in self._buffer:
|
|
||||||
stagenum = i
|
|
||||||
self._buffer = ''
|
|
||||||
break
|
|
||||||
self._status = stages[stagenum]
|
|
||||||
percs = re.findall(r'(\d+)%', string)
|
|
||||||
if percs:
|
|
||||||
progress = int(round((int(percs[-1]) * stage_weights[stagenum]) + (sum(stage_weights[:stagenum]) * 100)))
|
|
||||||
rates = re.findall(r'([\d.]+ [a-zA-Z]*/s+)', string)
|
|
||||||
if rates:
|
|
||||||
rate = rates[-1]
|
|
||||||
else:
|
|
||||||
rate = None
|
|
||||||
self.update(progress, rate)
|
|
||||||
else:
|
|
||||||
if stagenum == 0:
|
|
||||||
percs = re.findall(r': (\d+)', string)
|
|
||||||
if percs:
|
|
||||||
count = int(percs[-1])
|
|
||||||
if count > self._count:
|
|
||||||
self._count = count
|
|
||||||
self._fire_progress(-count)
|
|
||||||
super(GitProgressHandler, self).write(string)
|
|
||||||
|
|
||||||
|
|
||||||
class Git(FetchMethod):
|
class Git(FetchMethod):
|
||||||
"""Class to fetch a module or modules from git repositories"""
|
"""Class to fetch a module or modules from git repositories"""
|
||||||
def init(self, d):
|
def init(self, d):
|
||||||
pass
|
#
|
||||||
|
# Only enable _sortable revision if the key is set
|
||||||
def supports(self, ud, d):
|
#
|
||||||
|
if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True):
|
||||||
|
self._sortable_buildindex = self._sortable_buildindex_disabled
|
||||||
|
def supports(self, url, ud, d):
|
||||||
"""
|
"""
|
||||||
Check to see if a given url can be fetched with git.
|
Check to see if a given url can be fetched with git.
|
||||||
"""
|
"""
|
||||||
return ud.type in ['git']
|
return ud.type in ['git']
|
||||||
|
|
||||||
def supports_checksum(self, urldata):
|
|
||||||
return False
|
|
||||||
|
|
||||||
def urldata_init(self, ud, d):
|
def urldata_init(self, ud, d):
|
||||||
"""
|
"""
|
||||||
init git specific variable within url data
|
init git specific variable within url data
|
||||||
|
@ -145,7 +86,7 @@ class Git(FetchMethod):
|
||||||
elif not ud.host:
|
elif not ud.host:
|
||||||
ud.proto = 'file'
|
ud.proto = 'file'
|
||||||
else:
|
else:
|
||||||
ud.proto = "https"
|
ud.proto = "git"
|
||||||
|
|
||||||
if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
|
if not ud.proto in ('git', 'file', 'ssh', 'http', 'https', 'rsync'):
|
||||||
raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
|
raise bb.fetch2.ParameterError("Invalid protocol type", ud.url)
|
||||||
|
@ -154,50 +95,27 @@ class Git(FetchMethod):
|
||||||
|
|
||||||
ud.rebaseable = ud.parm.get("rebaseable","0") == "1"
|
ud.rebaseable = ud.parm.get("rebaseable","0") == "1"
|
||||||
|
|
||||||
ud.nobranch = ud.parm.get("nobranch","0") == "1"
|
|
||||||
|
|
||||||
# usehead implies nobranch
|
|
||||||
ud.usehead = ud.parm.get("usehead","0") == "1"
|
|
||||||
if ud.usehead:
|
|
||||||
if ud.proto != "file":
|
|
||||||
raise bb.fetch2.ParameterError("The usehead option is only for use with local ('protocol=file') git repositories", ud.url)
|
|
||||||
ud.nobranch = 1
|
|
||||||
|
|
||||||
# bareclone implies nocheckout
|
|
||||||
ud.bareclone = ud.parm.get("bareclone","0") == "1"
|
|
||||||
if ud.bareclone:
|
|
||||||
ud.nocheckout = 1
|
|
||||||
|
|
||||||
ud.unresolvedrev = {}
|
|
||||||
branches = ud.parm.get("branch", "master").split(',')
|
branches = ud.parm.get("branch", "master").split(',')
|
||||||
if len(branches) != len(ud.names):
|
if len(branches) != len(ud.names):
|
||||||
raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
|
raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
|
||||||
ud.branches = {}
|
ud.branches = {}
|
||||||
for pos, name in enumerate(ud.names):
|
for name in ud.names:
|
||||||
branch = branches[pos]
|
branch = branches[ud.names.index(name)]
|
||||||
ud.branches[name] = branch
|
ud.branches[name] = branch
|
||||||
ud.unresolvedrev[name] = branch
|
|
||||||
|
|
||||||
if ud.usehead:
|
ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
|
||||||
ud.unresolvedrev['default'] = 'HEAD'
|
|
||||||
|
|
||||||
ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0"
|
ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable
|
||||||
|
|
||||||
ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0") != "0") or ud.rebaseable
|
ud.setup_revisons(d)
|
||||||
|
|
||||||
ud.setup_revisions(d)
|
|
||||||
|
|
||||||
for name in ud.names:
|
for name in ud.names:
|
||||||
# Ensure anything that doesn't look like a sha256 checksum/revision is translated into one
|
# Ensure anything that doesn't look like a sha256 checksum/revision is translated into one
|
||||||
if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
|
if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
|
||||||
if ud.revisions[name]:
|
ud.branches[name] = ud.revisions[name]
|
||||||
ud.unresolvedrev[name] = ud.revisions[name]
|
ud.revisions[name] = self.latest_revision(ud.url, ud, d, name)
|
||||||
ud.revisions[name] = self.latest_revision(ud, d, name)
|
|
||||||
|
|
||||||
gitsrcname = '%s%s' % (ud.host.replace(':', '.'), ud.path.replace('/', '.').replace('*', '.'))
|
|
||||||
if gitsrcname.startswith('.'):
|
|
||||||
gitsrcname = gitsrcname[1:]
|
|
||||||
|
|
||||||
|
gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
|
||||||
# for rebaseable git repo, it is necessary to keep mirror tar ball
|
# for rebaseable git repo, it is necessary to keep mirror tar ball
|
||||||
# per revision, so that even the revision disappears from the
|
# per revision, so that even the revision disappears from the
|
||||||
# upstream repo in the future, the mirror will remain intact and still
|
# upstream repo in the future, the mirror will remain intact and still
|
||||||
|
@ -205,132 +123,111 @@ class Git(FetchMethod):
|
||||||
if ud.rebaseable:
|
if ud.rebaseable:
|
||||||
for name in ud.names:
|
for name in ud.names:
|
||||||
gitsrcname = gitsrcname + '_' + ud.revisions[name]
|
gitsrcname = gitsrcname + '_' + ud.revisions[name]
|
||||||
ud.mirrortarball = 'git2_%s.tar.gz' % gitsrcname
|
ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
|
||||||
ud.fullmirror = os.path.join(d.getVar("DL_DIR"), ud.mirrortarball)
|
ud.fullmirror = os.path.join(data.getVar("DL_DIR", d, True), ud.mirrortarball)
|
||||||
gitdir = d.getVar("GITDIR") or (d.getVar("DL_DIR") + "/git2/")
|
ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
|
||||||
ud.clonedir = os.path.join(gitdir, gitsrcname)
|
|
||||||
|
|
||||||
ud.localfile = ud.clonedir
|
ud.localfile = ud.clonedir
|
||||||
|
|
||||||
def localpath(self, ud, d):
|
def localpath(self, url, ud, d):
|
||||||
return ud.clonedir
|
return ud.clonedir
|
||||||
|
|
||||||
def need_update(self, ud, d):
|
def need_update(self, u, ud, d):
|
||||||
if not os.path.exists(ud.clonedir):
|
if not os.path.exists(ud.clonedir):
|
||||||
return True
|
return True
|
||||||
|
os.chdir(ud.clonedir)
|
||||||
for name in ud.names:
|
for name in ud.names:
|
||||||
if not self._contains_ref(ud, d, name, ud.clonedir):
|
if not self._contains_ref(ud.revisions[name], d):
|
||||||
return True
|
return True
|
||||||
if ud.write_tarballs and not os.path.exists(ud.fullmirror):
|
if ud.write_tarballs and not os.path.exists(ud.fullmirror):
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def try_premirror(self, ud, d):
|
def try_premirror(self, u, ud, d):
|
||||||
# If we don't do this, updating an existing checkout with only premirrors
|
# If we don't do this, updating an existing checkout with only premirrors
|
||||||
# is not possible
|
# is not possible
|
||||||
if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
|
if bb.data.getVar("BB_FETCH_PREMIRRORONLY", d, True) is not None:
|
||||||
return True
|
return True
|
||||||
if os.path.exists(ud.clonedir):
|
if os.path.exists(ud.clonedir):
|
||||||
return False
|
return False
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def download(self, ud, d):
|
def download(self, loc, ud, d):
|
||||||
"""Fetch url"""
|
"""Fetch url"""
|
||||||
|
|
||||||
|
if ud.user:
|
||||||
|
username = ud.user + '@'
|
||||||
|
else:
|
||||||
|
username = ""
|
||||||
|
|
||||||
|
ud.repochanged = not os.path.exists(ud.fullmirror)
|
||||||
|
|
||||||
# If the checkout doesn't exist and the mirror tarball does, extract it
|
# If the checkout doesn't exist and the mirror tarball does, extract it
|
||||||
if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
|
if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
|
||||||
bb.utils.mkdirhier(ud.clonedir)
|
bb.utils.mkdirhier(ud.clonedir)
|
||||||
runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)
|
os.chdir(ud.clonedir)
|
||||||
|
runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)
|
||||||
|
|
||||||
repourl = self._get_repo_url(ud)
|
repourl = "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)
|
||||||
|
|
||||||
# If the repo still doesn't exist, fallback to cloning it
|
# If the repo still doesn't exist, fallback to cloning it
|
||||||
if not os.path.exists(ud.clonedir):
|
if not os.path.exists(ud.clonedir):
|
||||||
# We do this since git will use a "-l" option automatically for local urls where possible
|
clone_cmd = "%s clone --bare --mirror %s %s" % (ud.basecmd, repourl, ud.clonedir)
|
||||||
if repourl.startswith("file://"):
|
bb.fetch2.check_network_access(d, clone_cmd)
|
||||||
repourl = repourl[7:]
|
runfetchcmd(clone_cmd, d)
|
||||||
clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, repourl, ud.clonedir)
|
|
||||||
if ud.proto.lower() != 'file':
|
|
||||||
bb.fetch2.check_network_access(d, clone_cmd, ud.url)
|
|
||||||
progresshandler = GitProgressHandler(d)
|
|
||||||
runfetchcmd(clone_cmd, d, log=progresshandler)
|
|
||||||
|
|
||||||
|
os.chdir(ud.clonedir)
|
||||||
# Update the checkout if needed
|
# Update the checkout if needed
|
||||||
needupdate = False
|
needupdate = False
|
||||||
for name in ud.names:
|
for name in ud.names:
|
||||||
if not self._contains_ref(ud, d, name, ud.clonedir):
|
if not self._contains_ref(ud.revisions[name], d):
|
||||||
needupdate = True
|
needupdate = True
|
||||||
if needupdate:
|
if needupdate:
|
||||||
try:
|
try:
|
||||||
runfetchcmd("%s remote rm origin" % ud.basecmd, d, workdir=ud.clonedir)
|
runfetchcmd("%s remote prune origin" % ud.basecmd, d)
|
||||||
|
runfetchcmd("%s remote rm origin" % ud.basecmd, d)
|
||||||
except bb.fetch2.FetchError:
|
except bb.fetch2.FetchError:
|
||||||
logger.debug(1, "No Origin")
|
logger.debug(1, "No Origin")
|
||||||
|
|
||||||
runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d, workdir=ud.clonedir)
|
runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d)
|
||||||
fetch_cmd = "LANG=C %s fetch -f --progress %s refs/*:refs/*" % (ud.basecmd, repourl)
|
fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd, repourl)
|
||||||
if ud.proto.lower() != 'file':
|
bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
|
||||||
bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
|
runfetchcmd(fetch_cmd, d)
|
||||||
progresshandler = GitProgressHandler(d)
|
runfetchcmd("%s prune-packed" % ud.basecmd, d)
|
||||||
runfetchcmd(fetch_cmd, d, log=progresshandler, workdir=ud.clonedir)
|
runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
|
||||||
runfetchcmd("%s prune-packed" % ud.basecmd, d, workdir=ud.clonedir)
|
ud.repochanged = True
|
||||||
runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d, workdir=ud.clonedir)
|
|
||||||
try:
|
|
||||||
os.unlink(ud.fullmirror)
|
|
||||||
except OSError as exc:
|
|
||||||
if exc.errno != errno.ENOENT:
|
|
||||||
raise
|
|
||||||
for name in ud.names:
|
|
||||||
if not self._contains_ref(ud, d, name, ud.clonedir):
|
|
||||||
raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
|
|
||||||
|
|
||||||
def build_mirror_data(self, ud, d):
|
def build_mirror_data(self, url, ud, d):
|
||||||
# Generate a mirror tarball if needed
|
# Generate a mirror tarball if needed
|
||||||
if ud.write_tarballs and not os.path.exists(ud.fullmirror):
|
if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)):
|
||||||
# it's possible that this symlink points to read-only filesystem with PREMIRROR
|
os.chdir(ud.clonedir)
|
||||||
if os.path.islink(ud.fullmirror):
|
|
||||||
os.unlink(ud.fullmirror)
|
|
||||||
|
|
||||||
logger.info("Creating tarball of git repository")
|
logger.info("Creating tarball of git repository")
|
||||||
runfetchcmd("tar -czf %s ." % ud.fullmirror, d, workdir=ud.clonedir)
|
runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d)
|
||||||
runfetchcmd("touch %s.done" % ud.fullmirror, d)
|
|
||||||
|
|
||||||
def unpack(self, ud, destdir, d):
|
def unpack(self, ud, destdir, d):
|
||||||
""" unpack the downloaded src to destdir"""
|
""" unpack the downloaded src to destdir"""
|
||||||
|
|
||||||
subdir = ud.parm.get("subpath", "")
|
subdir = ud.parm.get("subpath", "")
|
||||||
if subdir != "":
|
if subdir != "":
|
||||||
readpathspec = ":%s" % subdir
|
readpathspec = ":%s" % (subdir)
|
||||||
def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
|
def_destsuffix = "%s/" % os.path.basename(subdir)
|
||||||
else:
|
else:
|
||||||
readpathspec = ""
|
readpathspec = ""
|
||||||
def_destsuffix = "git/"
|
def_destsuffix = "git/"
|
||||||
|
|
||||||
destsuffix = ud.parm.get("destsuffix", def_destsuffix)
|
destsuffix = ud.parm.get("destsuffix", def_destsuffix)
|
||||||
destdir = ud.destdir = os.path.join(destdir, destsuffix)
|
destdir = os.path.join(destdir, destsuffix)
|
||||||
if os.path.exists(destdir):
|
if os.path.exists(destdir):
|
||||||
bb.utils.prunedir(destdir)
|
bb.utils.prunedir(destdir)
|
||||||
|
|
||||||
cloneflags = "-s -n"
|
runfetchcmd("git clone -s -n %s/ %s" % (ud.clonedir, destdir), d)
|
||||||
if ud.bareclone:
|
|
||||||
cloneflags += " --mirror"
|
|
||||||
|
|
||||||
runfetchcmd("%s clone %s %s/ %s" % (ud.basecmd, cloneflags, ud.clonedir, destdir), d)
|
|
||||||
repourl = self._get_repo_url(ud)
|
|
||||||
runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d, workdir=destdir)
|
|
||||||
if not ud.nocheckout:
|
if not ud.nocheckout:
|
||||||
|
os.chdir(destdir)
|
||||||
if subdir != "":
|
if subdir != "":
|
||||||
runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d,
|
runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
|
||||||
workdir=destdir)
|
runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
|
||||||
runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d, workdir=destdir)
|
|
||||||
elif not ud.nobranch:
|
|
||||||
branchname = ud.branches[ud.names[0]]
|
|
||||||
runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
|
|
||||||
ud.revisions[ud.names[0]]), d, workdir=destdir)
|
|
||||||
runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \
|
|
||||||
branchname), d, workdir=destdir)
|
|
||||||
else:
|
else:
|
||||||
runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=destdir)
|
runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def clean(self, ud, d):
|
def clean(self, ud, d):
|
||||||
|
@ -338,163 +235,81 @@ class Git(FetchMethod):
|
||||||
|
|
||||||
bb.utils.remove(ud.localpath, True)
|
bb.utils.remove(ud.localpath, True)
|
||||||
bb.utils.remove(ud.fullmirror)
|
bb.utils.remove(ud.fullmirror)
|
||||||
bb.utils.remove(ud.fullmirror + ".done")
|
|
||||||
|
|
||||||
def supports_srcrev(self):
|
def supports_srcrev(self):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def _contains_ref(self, ud, d, name, wd):
|
def _contains_ref(self, tag, d):
|
||||||
cmd = ""
|
basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
|
||||||
if ud.nobranch:
|
cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag)
|
||||||
cmd = "%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (
|
output = runfetchcmd(cmd, d, quiet=True)
|
||||||
ud.basecmd, ud.revisions[name])
|
|
||||||
else:
|
|
||||||
cmd = "%s branch --contains %s --list %s 2> /dev/null | wc -l" % (
|
|
||||||
ud.basecmd, ud.revisions[name], ud.branches[name])
|
|
||||||
try:
|
|
||||||
output = runfetchcmd(cmd, d, quiet=True, workdir=wd)
|
|
||||||
except bb.fetch2.FetchError:
|
|
||||||
return False
|
|
||||||
if len(output.split()) > 1:
|
if len(output.split()) > 1:
|
||||||
raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
|
raise bb.fetch2.FetchError("The command '%s' gave output with more then 1 line unexpectedly, output: '%s'" % (cmd, output))
|
||||||
return output.split()[0] != "0"
|
return output.split()[0] != "0"
|
||||||
|
|
||||||
def _get_repo_url(self, ud):
|
def _revision_key(self, url, ud, d, name):
|
||||||
"""
|
"""
|
||||||
Return the repository URL
|
Return a unique key for the url
|
||||||
|
"""
|
||||||
|
return "git:" + ud.host + ud.path.replace('/', '.') + ud.branches[name]
|
||||||
|
|
||||||
|
def _latest_revision(self, url, ud, d, name):
|
||||||
|
"""
|
||||||
|
Compute the HEAD revision for the url
|
||||||
"""
|
"""
|
||||||
if ud.user:
|
if ud.user:
|
||||||
username = ud.user + '@'
|
username = ud.user + '@'
|
||||||
else:
|
else:
|
||||||
username = ""
|
username = ""
|
||||||
return "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)
|
|
||||||
|
|
||||||
def _revision_key(self, ud, d, name):
|
basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
|
||||||
"""
|
cmd = "%s ls-remote %s://%s%s%s %s" % \
|
||||||
Return a unique key for the url
|
(basecmd, ud.proto, username, ud.host, ud.path, ud.branches[name])
|
||||||
"""
|
bb.fetch2.check_network_access(d, cmd)
|
||||||
return "git:" + ud.host + ud.path.replace('/', '.') + ud.unresolvedrev[name]
|
output = runfetchcmd(cmd, d, True)
|
||||||
|
if not output:
|
||||||
|
raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, url)
|
||||||
|
return output.split()[0]
|
||||||
|
|
||||||
def _lsremote(self, ud, d, search):
|
def _build_revision(self, url, ud, d, name):
|
||||||
"""
|
|
||||||
Run git ls-remote with the specified search string
|
|
||||||
"""
|
|
||||||
# Prevent recursion e.g. in OE if SRCPV is in PV, PV is in WORKDIR,
|
|
||||||
# and WORKDIR is in PATH (as a result of RSS), our call to
|
|
||||||
# runfetchcmd() exports PATH so this function will get called again (!)
|
|
||||||
# In this scenario the return call of the function isn't actually
|
|
||||||
# important - WORKDIR isn't needed in PATH to call git ls-remote
|
|
||||||
# anyway.
|
|
||||||
if d.getVar('_BB_GIT_IN_LSREMOTE', False):
|
|
||||||
return ''
|
|
||||||
d.setVar('_BB_GIT_IN_LSREMOTE', '1')
|
|
||||||
try:
|
|
||||||
repourl = self._get_repo_url(ud)
|
|
||||||
cmd = "%s ls-remote %s %s" % \
|
|
||||||
(ud.basecmd, repourl, search)
|
|
||||||
if ud.proto.lower() != 'file':
|
|
||||||
bb.fetch2.check_network_access(d, cmd, repourl)
|
|
||||||
output = runfetchcmd(cmd, d, True)
|
|
||||||
if not output:
|
|
||||||
raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url)
|
|
||||||
finally:
|
|
||||||
d.delVar('_BB_GIT_IN_LSREMOTE')
|
|
||||||
return output
|
|
||||||
|
|
||||||
def _latest_revision(self, ud, d, name):
|
|
||||||
"""
|
|
||||||
Compute the HEAD revision for the url
|
|
||||||
"""
|
|
||||||
output = self._lsremote(ud, d, "")
|
|
||||||
# Tags of the form ^{} may not work, need to fallback to other form
|
|
||||||
if ud.unresolvedrev[name][:5] == "refs/" or ud.usehead:
|
|
||||||
head = ud.unresolvedrev[name]
|
|
||||||
tag = ud.unresolvedrev[name]
|
|
||||||
else:
|
|
||||||
head = "refs/heads/%s" % ud.unresolvedrev[name]
|
|
||||||
tag = "refs/tags/%s" % ud.unresolvedrev[name]
|
|
||||||
for s in [head, tag + "^{}", tag]:
|
|
||||||
for l in output.strip().split('\n'):
|
|
||||||
sha1, ref = l.split()
|
|
||||||
if s == ref:
|
|
||||||
return sha1
|
|
||||||
raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output for %s" % \
|
|
||||||
(ud.unresolvedrev[name], ud.host+ud.path))
|
|
||||||
|
|
||||||
def latest_versionstring(self, ud, d):
|
|
||||||
"""
|
|
||||||
Compute the latest release name like "x.y.x" in "x.y.x+gitHASH"
|
|
||||||
by searching through the tags output of ls-remote, comparing
|
|
||||||
versions and returning the highest match.
|
|
||||||
"""
|
|
||||||
pupver = ('', '')
|
|
||||||
|
|
||||||
tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or "(?P<pver>([0-9][\.|_]?)+)")
|
|
||||||
try:
|
|
||||||
output = self._lsremote(ud, d, "refs/tags/*")
|
|
||||||
        except bb.fetch2.FetchError or bb.fetch2.NetworkAccess:
            return pupver

        verstring = ""
        revision = ""
        for line in output.split("\n"):
            if not line:
                break

            tag_head = line.split("/")[-1]
            # Ignore non-released branches
            m = re.search("(alpha|beta|rc|final)+", tag_head)
            if m:
                continue

            # search for version in the line
            tag = tagregex.search(tag_head)
            if tag == None:
                continue

            tag = tag.group('pver')
            tag = tag.replace("_", ".")

            if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
                continue

            verstring = tag
            revision = line.split()[0]
            pupver = (verstring, revision)

        return pupver

    def _build_revision(self, ud, d, name):
        return ud.revisions[name]

    def gitpkgv_revision(self, ud, d, name):
        """
        Return a sortable revision number by counting commits in the history
        Based on gitpkgv.bblass in meta-openembedded
        """
        rev = self._build_revision(ud, d, name)
        localpath = ud.localpath
        rev_file = os.path.join(localpath, "oe-gitpkgv_" + rev)
        if not os.path.exists(localpath):
            commits = None
        else:
            if not os.path.exists(rev_file) or not os.path.getsize(rev_file):
                from pipes import quote
                commits = bb.fetch2.runfetchcmd(
                        "git rev-list %s -- | wc -l" % quote(rev),
                        d, quiet=True).strip().lstrip('0')
                if commits:
                    open(rev_file, "w").write("%d\n" % int(commits))
            else:
                commits = open(rev_file, "r").readline(128).strip()
        if commits:
            return False, "%s+%s" % (commits, rev[:7])
        else:
            return True, str(rev)

    def checkstatus(self, fetch, ud, d):
        try:
            self._lsremote(ud, d, "")
            return True
        except bb.fetch2.FetchError:
            return False
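The gitpkgv_revision() helper above builds a sortable version component out of a commit count plus an abbreviated hash. A minimal stand-alone sketch of the same idea, using a plain subprocess call instead of BitBake's runfetchcmd (the repository path and revision below are placeholders):

import subprocess

def sortable_revision(clonedir, rev):
    # Count the commits reachable from 'rev'; the count only ever grows,
    # so "count+shorthash" sorts naturally as the repository advances.
    count = subprocess.check_output(
        ["git", "-C", clonedir, "rev-list", "--count", rev],
        text=True).strip()
    return "%s+%s" % (count, rev[:7])

# e.g. sortable_revision("/srv/dl/git2/example.git", "0123abcdef...") -> "1234+0123abc"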
@@ -1,74 +0,0 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' git annex implementation
"""

# Copyright (C) 2014 Otavio Salvador
# Copyright (C) 2014 O.S. Systems Software LTDA.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import bb
from bb.fetch2.git import Git
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger

class GitANNEX(Git):
    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with git.
        """
        return ud.type in ['gitannex']

    def uses_annex(self, ud, d, wd):
        for name in ud.names:
            try:
                runfetchcmd("%s rev-list git-annex" % (ud.basecmd), d, quiet=True, workdir=wd)
                return True
            except bb.fetch.FetchError:
                pass

        return False

    def update_annex(self, ud, d, wd):
        try:
            runfetchcmd("%s annex get --all" % (ud.basecmd), d, quiet=True, workdir=wd)
        except bb.fetch.FetchError:
            return False
        runfetchcmd("chmod u+w -R %s/annex" % (ud.clonedir), d, quiet=True, workdir=wd)

        return True

    def download(self, ud, d):
        Git.download(self, ud, d)

        annex = self.uses_annex(ud, d, ud.clonedir)
        if annex:
            self.update_annex(ud, d, ud.clonedir)

    def unpack(self, ud, destdir, d):
        Git.unpack(self, ud, destdir, d)

        try:
            runfetchcmd("%s annex init" % (ud.basecmd), d, workdir=ud.destdir)
        except bb.fetch.FetchError:
            pass

        annex = self.uses_annex(ud, d, ud.destdir)
        if annex:
            runfetchcmd("%s annex get" % (ud.basecmd), d, workdir=ud.destdir)
            runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True, workdir=ud.destdir)
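The git annex fetcher above, like the gitsm fetcher that follows, uses the same pattern: subclass Git, claim a dedicated URL scheme in supports(), and hook extra work into download()/unpack(). A bare skeleton of that pattern, with an invented 'gitfoo' scheme purely for illustration:

from bb.fetch2.git import Git

class GitFoo(Git):
    """Sketch of a Git-derived fetcher for a hypothetical gitfoo:// scheme."""

    def supports(self, ud, d):
        # Only claim our own URL type; plain git:// URLs stay with the Git fetcher.
        return ud.type in ['gitfoo']

    def download(self, ud, d):
        Git.download(self, ud, d)   # normal clone/update first
        # ...then any scheme-specific post-processing on ud.clonedir...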
@@ -1,130 +0,0 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' git submodules implementation

Inherits from and extends the Git fetcher to retrieve submodules of a git repository
after cloning.

SRC_URI = "gitsm://<see Git fetcher for syntax>"

See the Git fetcher, git://, for usage documentation.

NOTE: Switching a SRC_URI from "git://" to "gitsm://" requires a clean of your recipe.

"""

# Copyright (C) 2013 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

import os
import bb
from bb.fetch2.git import Git
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger

class GitSM(Git):
    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with git.
        """
        return ud.type in ['gitsm']

    def uses_submodules(self, ud, d, wd):
        for name in ud.names:
            try:
                runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=wd)
                return True
            except bb.fetch.FetchError:
                pass
        return False

    def _set_relative_paths(self, repopath):
        """
        Fix submodule paths to be relative instead of absolute,
        so that when we move the repo it doesn't break
        (In Git 1.7.10+ this is done automatically)
        """
        submodules = []
        with open(os.path.join(repopath, '.gitmodules'), 'r') as f:
            for line in f.readlines():
                if line.startswith('[submodule'):
                    submodules.append(line.split('"')[1])

        for module in submodules:
            repo_conf = os.path.join(repopath, module, '.git')
            if os.path.exists(repo_conf):
                with open(repo_conf, 'r') as f:
                    lines = f.readlines()
                newpath = ''
                for i, line in enumerate(lines):
                    if line.startswith('gitdir:'):
                        oldpath = line.split(': ')[-1].rstrip()
                        if oldpath.startswith('/'):
                            newpath = '../' * (module.count('/') + 1) + '.git/modules/' + module
                            lines[i] = 'gitdir: %s\n' % newpath
                            break
                if newpath:
                    with open(repo_conf, 'w') as f:
                        for line in lines:
                            f.write(line)

            repo_conf2 = os.path.join(repopath, '.git', 'modules', module, 'config')
            if os.path.exists(repo_conf2):
                with open(repo_conf2, 'r') as f:
                    lines = f.readlines()
                newpath = ''
                for i, line in enumerate(lines):
                    if line.lstrip().startswith('worktree = '):
                        oldpath = line.split(' = ')[-1].rstrip()
                        if oldpath.startswith('/'):
                            newpath = '../' * (module.count('/') + 3) + module
                            lines[i] = '\tworktree = %s\n' % newpath
                            break
                if newpath:
                    with open(repo_conf2, 'w') as f:
                        for line in lines:
                            f.write(line)

    def update_submodules(self, ud, d):
        # We have to convert bare -> full repo, do the submodule bit, then convert back
        tmpclonedir = ud.clonedir + ".tmp"
        gitdir = tmpclonedir + os.sep + ".git"
        bb.utils.remove(tmpclonedir, True)
        os.mkdir(tmpclonedir)
        os.rename(ud.clonedir, gitdir)
        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
        runfetchcmd(ud.basecmd + " reset --hard", d, workdir=tmpclonedir)
        runfetchcmd(ud.basecmd + " checkout -f " + ud.revisions[ud.names[0]], d, workdir=tmpclonedir)
        runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=tmpclonedir)
        self._set_relative_paths(tmpclonedir)
        runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d, workdir=tmpclonedir)
        os.rename(gitdir, ud.clonedir,)
        bb.utils.remove(tmpclonedir, True)

    def download(self, ud, d):
        Git.download(self, ud, d)

        submodules = self.uses_submodules(ud, d, ud.clonedir)
        if submodules:
            self.update_submodules(ud, d)

    def unpack(self, ud, destdir, d):
        Git.unpack(self, ud, destdir, d)

        submodules = self.uses_submodules(ud, d, ud.destdir)
        if submodules:
            runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=ud.destdir)
            runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=ud.destdir)
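_set_relative_paths() rewrites absolute gitdir:/worktree entries so that a moved checkout keeps working. The path arithmetic it relies on is easiest to see in isolation (the module names below are invented):

def relative_gitdir(module):
    # A submodule checked out at <repo>/<module> keeps its real object store
    # under <repo>/.git/modules/<module>; one "../" per path component of the
    # module, plus one for the module directory itself, points back at it.
    return '../' * (module.count('/') + 1) + '.git/modules/' + module

print(relative_gitdir('libfoo'))           # ../.git/modules/libfoo
print(relative_gitdir('third_party/bar'))  # ../../.git/modules/third_party/bar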
@@ -28,7 +28,7 @@ import os
import sys
import logging
import bb
import errno
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import MissingParameterError
@@ -37,19 +37,12 @@ from bb.fetch2 import logger

class Hg(FetchMethod):
    """Class to fetch from mercurial repositories"""
    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with mercurial.
        """
        return ud.type in ['hg']

    def supports_checksum(self, urldata):
        """
        Don't require checksums for local archives created from
        repository checkouts.
        """
        return False

    def urldata_init(self, ud, d):
        """
        init hg specific variable within url data
@@ -59,35 +52,21 @@ class Hg(FetchMethod):

        ud.module = ud.parm["module"]

        if 'protocol' in ud.parm:
            ud.proto = ud.parm['protocol']
        elif not ud.host:
            ud.proto = 'file'
        else:
            ud.proto = "hg"

        ud.setup_revisions(d)

        if 'rev' in ud.parm:
            ud.revision = ud.parm['rev']
        elif not ud.revision:
            ud.revision = self.latest_revision(ud, d)

        # Create paths to mercurial checkouts
        hgsrcname = '%s_%s_%s' % (ud.module.replace('/', '.'), \
                            ud.host, ud.path.replace('/', '.'))
        ud.mirrortarball = 'hg_%s.tar.gz' % hgsrcname
        ud.fullmirror = os.path.join(d.getVar("DL_DIR"), ud.mirrortarball)

        hgdir = d.getVar("HGDIR") or (d.getVar("DL_DIR") + "/hg/")
        ud.pkgdir = os.path.join(hgdir, hgsrcname)
        ud.moddir = os.path.join(ud.pkgdir, ud.module)
        ud.localfile = ud.moddir
        ud.basecmd = d.getVar("FETCHCMD_hg") or "/usr/bin/env hg"

        ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS")

    def need_update(self, ud, d):
        revTag = ud.parm.get('rev', 'tip')
        if revTag == "tip":
            return True
@@ -95,22 +74,15 @@ class Hg(FetchMethod):
            return True
        return False

    def try_premirror(self, ud, d):
        # If we don't do this, updating an existing checkout with only premirrors
        # is not possible
        if d.getVar("BB_FETCH_PREMIRRORONLY") is not None:
            return True
        if os.path.exists(ud.moddir):
            return False
        return True

    def _buildhgcommand(self, ud, d, command):
        """
        Build up an hg commandline based on ud
        command is "fetch", "update", "info"
        """

        proto = ud.parm.get('protocol', 'http')

        host = ud.host
        if proto == "file":
@@ -120,150 +92,85 @@ class Hg(FetchMethod):
        if not ud.user:
            hgroot = host + ud.path
        else:
            if ud.pswd:
                hgroot = ud.user + ":" + ud.pswd + "@" + host + ud.path
            else:
                hgroot = ud.user + "@" + host + ud.path

        if command == "info":
            return "%s identify -i %s://%s/%s" % (ud.basecmd, proto, hgroot, ud.module)

        options = [];

        # Don't specify revision for the fetch; clone the entire repo.
        # This avoids an issue if the specified revision is a tag, because
        # the tag actually exists in the specified revision + 1, so it won't
        # be available when used in any successive commands.
        if ud.revision and command != "fetch":
            options.append("-r %s" % ud.revision)

        if command == "fetch":
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" clone %s %s://%s/%s %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options), proto, hgroot, ud.module, ud.module)
            else:
                cmd = "%s clone %s %s://%s/%s %s" % (ud.basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
        elif command == "pull":
            # do not pass options list; limiting pull to rev causes the local
            # repo not to contain it and immediately following "update" command
            # will crash
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" pull" % (ud.basecmd, ud.user, ud.pswd, proto)
            else:
                cmd = "%s pull" % (ud.basecmd)
        elif command == "update":
            if ud.user and ud.pswd:
                cmd = "%s --config auth.default.prefix=* --config auth.default.username=%s --config auth.default.password=%s --config \"auth.default.schemes=%s\" update -C %s" % (ud.basecmd, ud.user, ud.pswd, proto, " ".join(options))
            else:
                cmd = "%s update -C %s" % (ud.basecmd, " ".join(options))
        else:
            raise FetchError("Invalid hg command %s" % command, ud.url)

        return cmd

    def download(self, ud, d):
        """Fetch url"""

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.pkgdir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d, workdir=ud.pkgdir)

        if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
            # Found the source, check whether need pull
            updatecmd = self._buildhgcommand(ud, d, "update")
            logger.debug(1, "Running %s", updatecmd)
            try:
                runfetchcmd(updatecmd, d, workdir=ud.moddir)
            except bb.fetch2.FetchError:
                # Runnning pull in the repo
                pullcmd = self._buildhgcommand(ud, d, "pull")
                logger.info("Pulling " + ud.url)
                # update sources there
                logger.debug(1, "Running %s", pullcmd)
                bb.fetch2.check_network_access(d, pullcmd, ud.url)
                runfetchcmd(pullcmd, d, workdir=ud.moddir)
                try:
                    os.unlink(ud.fullmirror)
                except OSError as exc:
                    if exc.errno != errno.ENOENT:
                        raise

        # No source found, clone it.
        if not os.path.exists(ud.moddir):
            fetchcmd = self._buildhgcommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            logger.debug(1, "Running %s", fetchcmd)
            bb.fetch2.check_network_access(d, fetchcmd, ud.url)
            runfetchcmd(fetchcmd, d, workdir=ud.pkgdir)

        # Even when we clone (fetch), we still need to update as hg's clone
        # won't checkout the specified revision if its on a branch
        updatecmd = self._buildhgcommand(ud, d, "update")
        logger.debug(1, "Running %s", updatecmd)
        runfetchcmd(updatecmd, d, workdir=ud.moddir)

    def clean(self, ud, d):
        """ Clean the hg dir """

        bb.utils.remove(ud.localpath, True)
        bb.utils.remove(ud.fullmirror)
        bb.utils.remove(ud.fullmirror + ".done")

    def supports_srcrev(self):
        return True

    def _latest_revision(self, ud, d, name):
        """
        Compute tip revision for the url
        """
        bb.fetch2.check_network_access(d, self._buildhgcommand(ud, d, "info"), ud.url)
        output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
        return output.strip()

    def _build_revision(self, ud, d, name):
        return ud.revision

    def _revision_key(self, ud, d, name):
        """
        Return a unique key for the url
        """
        return "hg:" + ud.moddir

    def build_mirror_data(self, ud, d):
        # Generate a mirror tarball if needed
        if ud.write_tarballs == "1" and not os.path.exists(ud.fullmirror):
            # it's possible that this symlink points to read-only filesystem with PREMIRROR
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            logger.info("Creating tarball of hg repository")
            runfetchcmd("tar -czf %s %s" % (ud.fullmirror, ud.module), d, workdir=ud.pkgdir)
            runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=ud.pkgdir)

    def localpath(self, ud, d):
        return ud.pkgdir

    def unpack(self, ud, destdir, d):
        """
        Make a local clone or export for the url
        """

        revflag = "-r %s" % ud.revision
        subdir = ud.parm.get("destsuffix", ud.module)
        codir = "%s/%s" % (destdir, subdir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata != "nokeep":
            if not os.access(os.path.join(codir, '.hg'), os.R_OK):
                logger.debug(2, "Unpack: creating new hg repository in '" + codir + "'")
                runfetchcmd("%s init %s" % (ud.basecmd, codir), d)
            logger.debug(2, "Unpack: updating source in '" + codir + "'")
            runfetchcmd("%s pull %s" % (ud.basecmd, ud.moddir), d, workdir=codir)
            runfetchcmd("%s up -C %s" % (ud.basecmd, revflag), d, workdir=codir)
        else:
            logger.debug(2, "Unpack: extracting source to '" + codir + "'")
            runfetchcmd("%s archive -t files %s %s" % (ud.basecmd, revflag, codir), d, workdir=ud.moddir)
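_buildhgcommand() pieces the full mercurial command line together from the url data. As a rough illustration of the strings it ends up producing, with an invented host, module and revision (and no username/password, so the plain variants are taken):

basecmd = "/usr/bin/env hg"
proto, hgroot, module = "http", "hg.example.org/repo", "mymod"
options = ["-r 0123abc"]      # only used for "update"; "fetch" clones the whole repo

clone_cmd = "%s clone %s %s://%s/%s %s" % (basecmd, "", proto, hgroot, module, module)
update_cmd = "%s update -C %s" % (basecmd, " ".join(options))
print(clone_cmd)    # /usr/bin/env hg clone  http://hg.example.org/repo/mymod mymod
print(update_cmd)   # /usr/bin/env hg update -C -r 0123abc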
@@ -26,14 +26,13 @@ BitBake build tools.
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import urllib.request, urllib.parse, urllib.error
import bb
import bb.utils
from bb.fetch2 import FetchMethod, FetchError
from bb.fetch2 import logger

class Local(FetchMethod):
    def supports(self, urldata, d):
        """
        Check to see if a given url represents a local fetch.
        """
@@ -41,74 +40,47 @@ class Local(FetchMethod):

    def urldata_init(self, ud, d):
        # We don't set localfile as for this fetcher the file is already local!
        ud.decodedurl = urllib.parse.unquote(ud.url.split("://")[1].split(";")[0])
        ud.basename = os.path.basename(ud.decodedurl)
        ud.basepath = ud.decodedurl
        ud.needdonestamp = False
        return

    def localpath(self, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        """
        return self.localpaths(urldata, d)[-1]

    def localpaths(self, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        """
        searched = []
        path = urldata.decodedurl
        newpath = path
        if path[0] == "/":
            return [path]
        filespath = d.getVar('FILESPATH')
        if filespath:
            logger.debug(2, "Searching for %s in paths:\n    %s" % (path, "\n    ".join(filespath.split(":"))))
            newpath, hist = bb.utils.which(filespath, path, history=True)
            searched.extend(hist)
        if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
            # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
            newpath, hist = bb.utils.which(filespath, ".", history=True)
            searched.extend(hist)
            logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
            return searched
        if not os.path.exists(newpath):
            dldirfile = os.path.join(d.getVar("DL_DIR"), path)
            logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
            bb.utils.mkdirhier(os.path.dirname(dldirfile))
            searched.append(dldirfile)
            return searched
        return searched

    def need_update(self, ud, d):
        if ud.url.find("*") != -1:
            return False
        if os.path.exists(ud.localpath):
            return False
        return True

    def download(self, urldata, d):
        """Fetch urls (no-op for Local method)"""
        # no need to fetch local files, we'll deal with them in place.
        if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath):
            locations = []
            filespath = d.getVar('FILESPATH')
            if filespath:
                locations = filespath.split(":")
            locations.append(d.getVar("DL_DIR"))

            msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n    " + "\n    ".join(locations)
            raise FetchError(msg)

        return True

    def checkstatus(self, fetch, urldata, d):
        """
        Check the status of the url
        """
        if urldata.localpath.find("*") != -1:
            logger.info("URL %s looks like a glob and was therefore not checked.", urldata.url)
            return True
        if os.path.exists(urldata.localpath):
            return True
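localpaths() above walks FILESPATH and finally falls back to DL_DIR, recording every location it tried. A simplified, self-contained sketch of the same search order (directory names are examples, and bb.utils.which is replaced by a plain loop):

import os

def find_local(path, filespath, dl_dir):
    searched = []
    if path.startswith("/"):
        return [path]                        # absolute paths are used as-is
    for directory in filespath.split(":"):   # first FILESPATH entry containing the file wins
        candidate = os.path.join(directory, path)
        searched.append(candidate)
        if os.path.exists(candidate):
            return searched
    searched.append(os.path.join(dl_dir, path))  # otherwise default to DL_DIR
    return searched

# find_local("defconfig", "/layer/recipes/files:/layer/files", "/srv/downloads")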
@@ -1,304 +0,0 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' NPM implementation

The NPM fetcher is used to retrieve files from the npmjs repository

Usage in the recipe:

    SRC_URI = "npm://registry.npmjs.org/;name=${PN};version=${PV}"
    Suported SRC_URI options are:

    - name
    - version

    npm://registry.npmjs.org/${PN}/-/${PN}-${PV}.tgz  would become npm://registry.npmjs.org;name=${PN};version=${PV}
    The fetcher all triggers off the existence of ud.localpath. If that exists and has the ".done" stamp, its assumed the fetch is good/done

"""

import os
import sys
import urllib.request, urllib.parse, urllib.error
import json
import subprocess
import signal
import bb
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import ChecksumError
from bb.fetch2 import runfetchcmd
from bb.fetch2 import logger
from bb.fetch2 import UnpackError
from bb.fetch2 import ParameterError

def subprocess_setup():
    # Python installs a SIGPIPE handler by default. This is usually not what
    # non-Python subprocesses expect.
    # SIGPIPE errors are known issues with gzip/bash
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)

class Npm(FetchMethod):

    """Class to fetch urls via 'npm'"""
    def init(self, d):
        pass

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with npm
        """
        return ud.type in ['npm']

    def debug(self, msg):
        logger.debug(1, "NpmFetch: %s", msg)

    def clean(self, ud, d):
        logger.debug(2, "Calling cleanup %s" % ud.pkgname)
        bb.utils.remove(ud.localpath, False)
        bb.utils.remove(ud.pkgdatadir, True)
        bb.utils.remove(ud.fullmirror, False)

    def urldata_init(self, ud, d):
        """
        init NPM specific variable within url data
        """
        if 'downloadfilename' in ud.parm:
            ud.basename = ud.parm['downloadfilename']
        else:
            ud.basename = os.path.basename(ud.path)

        # can't call it ud.name otherwise fetcher base class will start doing sha1stuff
        # TODO: find a way to get an sha1/sha256 manifest of pkg & all deps
        ud.pkgname = ud.parm.get("name", None)
        if not ud.pkgname:
            raise ParameterError("NPM fetcher requires a name parameter", ud.url)
        ud.version = ud.parm.get("version", None)
        if not ud.version:
            raise ParameterError("NPM fetcher requires a version parameter", ud.url)
        ud.bbnpmmanifest = "%s-%s.deps.json" % (ud.pkgname, ud.version)
        ud.bbnpmmanifest = ud.bbnpmmanifest.replace('/', '-')
        ud.registry = "http://%s" % (ud.url.replace('npm://', '', 1).split(';'))[0]
        prefixdir = "npm/%s" % ud.pkgname
        ud.pkgdatadir = d.expand("${DL_DIR}/%s" % prefixdir)
        if not os.path.exists(ud.pkgdatadir):
            bb.utils.mkdirhier(ud.pkgdatadir)
        ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest)

        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate "
        ud.prefixdir = prefixdir

        ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0") != "0")
        ud.mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version)
        ud.mirrortarball = ud.mirrortarball.replace('/', '-')
        ud.fullmirror = os.path.join(d.getVar("DL_DIR"), ud.mirrortarball)

    def need_update(self, ud, d):
        if os.path.exists(ud.localpath):
            return False
        return True

    def _runwget(self, ud, d, command, quiet):
        logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
        bb.fetch2.check_network_access(d, command, ud.url)
        dldir = d.getVar("DL_DIR")
        runfetchcmd(command, d, quiet, workdir=dldir)

    def _unpackdep(self, ud, pkg, data, destdir, dldir, d):
        file = data[pkg]['tgz']
        logger.debug(2, "file to extract is %s" % file)
        if file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
            cmd = 'tar xz --strip 1 --no-same-owner --warning=no-unknown-keyword -f %s/%s' % (dldir, file)
        else:
            bb.fatal("NPM package %s downloaded not a tarball!" % file)

        # Change to subdir before executing command
        if not os.path.exists(destdir):
            os.makedirs(destdir)
        path = d.getVar('PATH')
        if path:
            cmd = "PATH=\"%s\" %s" % (path, cmd)
        bb.note("Unpacking %s to %s/" % (file, destdir))
        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=destdir)

        if ret != 0:
            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), ud.url)

        if 'deps' not in data[pkg]:
            return
        for dep in data[pkg]['deps']:
            self._unpackdep(ud, dep, data[pkg]['deps'], "%s/node_modules/%s" % (destdir, dep), dldir, d)


    def unpack(self, ud, destdir, d):
        dldir = d.getVar("DL_DIR")
        with open("%s/npm/%s" % (dldir, ud.bbnpmmanifest)) as datafile:
            workobj = json.load(datafile)
        dldir = "%s/%s" % (os.path.dirname(ud.localpath), ud.pkgname)

        if 'subdir' in ud.parm:
            unpackdir = '%s/%s' % (destdir, ud.parm.get('subdir'))
        else:
            unpackdir = '%s/npmpkg' % destdir

        self._unpackdep(ud, ud.pkgname, workobj, unpackdir, dldir, d)

    def _parse_view(self, output):
        '''
        Parse the output of npm view --json; the last JSON result
        is assumed to be the one that we're interested in.
        '''
        pdata = None
        outdeps = {}
        datalines = []
        bracelevel = 0
        for line in output.splitlines():
            if bracelevel:
                datalines.append(line)
            elif '{' in line:
                datalines = []
                datalines.append(line)
            bracelevel = bracelevel + line.count('{') - line.count('}')
        if datalines:
            pdata = json.loads('\n'.join(datalines))
        return pdata

    def _getdependencies(self, pkg, data, version, d, ud, optional=False, fetchedlist=None):
        if fetchedlist is None:
            fetchedlist = []
        pkgfullname = pkg
        if version != '*' and not '/' in version:
            pkgfullname += "@'%s'" % version
        logger.debug(2, "Calling getdeps on %s" % pkg)
        fetchcmd = "npm view %s --json --registry %s" % (pkgfullname, ud.registry)
        output = runfetchcmd(fetchcmd, d, True)
        pdata = self._parse_view(output)
        if not pdata:
            raise FetchError("The command '%s' returned no output" % fetchcmd)
        if optional:
            pkg_os = pdata.get('os', None)
            if pkg_os:
                if not isinstance(pkg_os, list):
                    pkg_os = [pkg_os]
                blacklist = False
                for item in pkg_os:
                    if item.startswith('!'):
                        blacklist = True
                        break
                if (not blacklist and 'linux' not in pkg_os) or '!linux' in pkg_os:
                    logger.debug(2, "Skipping %s since it's incompatible with Linux" % pkg)
                    return
        #logger.debug(2, "Output URL is %s - %s - %s" % (ud.basepath, ud.basename, ud.localfile))
        outputurl = pdata['dist']['tarball']
        data[pkg] = {}
        data[pkg]['tgz'] = os.path.basename(outputurl)
        if not outputurl in fetchedlist:
            self._runwget(ud, d, "%s --directory-prefix=%s %s" % (self.basecmd, ud.prefixdir, outputurl), False)
            fetchedlist.append(outputurl)

        dependencies = pdata.get('dependencies', {})
        optionalDependencies = pdata.get('optionalDependencies', {})
        dependencies.update(optionalDependencies)
        depsfound = {}
        optdepsfound = {}
        data[pkg]['deps'] = {}
        for dep in dependencies:
            if dep in optionalDependencies:
                optdepsfound[dep] = dependencies[dep]
            else:
                depsfound[dep] = dependencies[dep]
        for dep, version in optdepsfound.items():
            self._getdependencies(dep, data[pkg]['deps'], version, d, ud, optional=True, fetchedlist=fetchedlist)
        for dep, version in depsfound.items():
            self._getdependencies(dep, data[pkg]['deps'], version, d, ud, fetchedlist=fetchedlist)

    def _getshrinkeddependencies(self, pkg, data, version, d, ud, lockdown, manifest, toplevel=True):
        logger.debug(2, "NPM shrinkwrap file is %s" % data)
        if toplevel:
            name = data.get('name', None)
            if name and name != pkg:
                for obj in data.get('dependencies', []):
                    if obj == pkg:
                        self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest, False)
                        return
        outputurl = "invalid"
        if ('resolved' not in data) or (not data['resolved'].startswith('http')):
            # will be the case for ${PN}
            fetchcmd = "npm view %s@%s dist.tarball --registry %s" % (pkg, version, ud.registry)
            logger.debug(2, "Found this matching URL: %s" % str(fetchcmd))
            outputurl = runfetchcmd(fetchcmd, d, True)
        else:
            outputurl = data['resolved']
        self._runwget(ud, d, "%s --directory-prefix=%s %s" % (self.basecmd, ud.prefixdir, outputurl), False)
        manifest[pkg] = {}
        manifest[pkg]['tgz'] = os.path.basename(outputurl).rstrip()
        manifest[pkg]['deps'] = {}

        if pkg in lockdown:
            sha1_expected = lockdown[pkg][version]
            sha1_data = bb.utils.sha1_file("npm/%s/%s" % (ud.pkgname, manifest[pkg]['tgz']))
            if sha1_expected != sha1_data:
                msg = "\nFile: '%s' has %s checksum %s when %s was expected" % (manifest[pkg]['tgz'], 'sha1', sha1_data, sha1_expected)
                raise ChecksumError('Checksum mismatch!%s' % msg)
        else:
            logger.debug(2, "No lockdown data for %s@%s" % (pkg, version))

        if 'dependencies' in data:
            for obj in data['dependencies']:
                logger.debug(2, "Found dep is %s" % str(obj))
                self._getshrinkeddependencies(obj, data['dependencies'][obj], data['dependencies'][obj]['version'], d, ud, lockdown, manifest[pkg]['deps'], False)

    def download(self, ud, d):
        """Fetch url"""
        jsondepobj = {}
        shrinkobj = {}
        lockdown = {}

        if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
            dest = d.getVar("DL_DIR")
            bb.utils.mkdirhier(dest)
            runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest)
            return

        shwrf = d.getVar('NPM_SHRINKWRAP')
        logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
        if shwrf:
            try:
                with open(shwrf) as datafile:
                    shrinkobj = json.load(datafile)
            except Exception as e:
                raise FetchError('Error loading NPM_SHRINKWRAP file "%s" for %s: %s' % (shwrf, ud.pkgname, str(e)))
        elif not ud.ignore_checksums:
            logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
        lckdf = d.getVar('NPM_LOCKDOWN')
        logger.debug(2, "NPM lockdown file is %s" % lckdf)
        if lckdf:
            try:
                with open(lckdf) as datafile:
                    lockdown = json.load(datafile)
            except Exception as e:
                raise FetchError('Error loading NPM_LOCKDOWN file "%s" for %s: %s' % (lckdf, ud.pkgname, str(e)))
        elif not ud.ignore_checksums:
            logger.warning('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)

        if ('name' not in shrinkobj):
            self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
        else:
            self._getshrinkeddependencies(ud.pkgname, shrinkobj, ud.version, d, ud, lockdown, jsondepobj)

        with open(ud.localpath, 'w') as outfile:
            json.dump(jsondepobj, outfile)

    def build_mirror_data(self, ud, d):
        # Generate a mirror tarball if needed
        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
            # it's possible that this symlink points to read-only filesystem with PREMIRROR
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            dldir = d.getVar("DL_DIR")
            logger.info("Creating tarball of npm data")
            runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d,
                        workdir=dldir)
            runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=dldir)
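_parse_view() keeps only the last JSON object printed by "npm view --json" by tracking brace depth line by line. The same trick in isolation (the sample output below is fabricated):

import json

def last_json_object(output):
    datalines, bracelevel = [], 0
    for line in output.splitlines():
        if bracelevel:              # still inside an object: keep collecting
            datalines.append(line)
        elif '{' in line:           # a new top-level object starts: restart collection
            datalines = [line]
        bracelevel += line.count('{') - line.count('}')
    return json.loads('\n'.join(datalines)) if datalines else None

sample = 'npm info ok\n{ "version": "1.0.0" }\n{ "version": "1.0.1",\n  "dist": { "tarball": "http://reg.example/pkg.tgz" } }'
print(last_json_object(sample)["version"])   # 1.0.1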
@@ -10,6 +10,7 @@ import os
import sys
import logging
import bb
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import MissingParameterError
@@ -19,7 +20,7 @@ class Osc(FetchMethod):
    """Class to fetch a module or modules from Opensuse build server
       repositories."""

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with osc.
        """
@@ -33,20 +34,20 @@ class Osc(FetchMethod):

        # Create paths to osc checkouts
        relpath = self._strip_leading_slashes(ud.path)
        ud.pkgdir = os.path.join(d.getVar('OSCDIR'), ud.host)
        ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)

        if 'rev' in ud.parm:
            ud.revision = ud.parm['rev']
        else:
            pv = d.getVar("PV", False)
            rev = bb.fetch2.srcrev_internal_helper(ud, d)
            if rev and rev != True:
                ud.revision = rev
            else:
                ud.revision = ""

        ud.localfile = d.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision))

    def _buildosccommand(self, ud, d, command):
        """
@@ -54,9 +55,9 @@ class Osc(FetchMethod):
        command is "fetch", "update", "info"
        """

        basecmd = d.expand('${FETCHCMD_osc}')

        proto = ud.parm.get('protocol', 'ocs')

        options = []

@@ -76,32 +77,34 @@ class Osc(FetchMethod):

        return osccmd

    def download(self, ud, d):
        """
        Fetch url
        """

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK):
            oscupdatecmd = self._buildosccommand(ud, d, "update")
            logger.info("Update "+ ud.url)
            # update sources there
            logger.debug(1, "Running %s", oscupdatecmd)
            bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
            runfetchcmd(oscupdatecmd, d, workdir=ud.moddir)
        else:
            oscfetchcmd = self._buildosccommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            logger.debug(1, "Running %s", oscfetchcmd)
            bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
            runfetchcmd(oscfetchcmd, d, workdir=ud.pkgdir)

        # tar them up to a defined filename
        runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d,
                    cleanup=[ud.localpath], workdir=os.path.join(ud.pkgdir + ud.path))

    def supports_srcrev(self):
        return False
@@ -111,7 +114,7 @@ class Osc(FetchMethod):
        Generate a .oscrc to be used for this run.
        """

        config_path = os.path.join(d.getVar('OSCDIR'), "oscrc")
        if (os.path.exists(config_path)):
            os.remove(config_path)

@@ -120,8 +123,8 @@ class Osc(FetchMethod):
            f.write("apisrv = %s\n" % ud.host)
            f.write("scheme = http\n")
            f.write("su-wrapper = su -c\n")
            f.write("build-root = %s\n" % d.getVar('WORKDIR'))
            f.write("urllist = %s\n" % d.getVar("OSCURLLIST"))
            f.write("extra-pkgs = gzip\n")
            f.write("\n")
            f.write("[%s]\n" % ud.host)
@@ -1,12 +1,14 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementation for perforce

"""

# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2016 Kodak Alaris, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -23,187 +25,172 @@ BitBake 'Fetch' implementation for perforce
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import logging
import bb
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd

class Perforce(FetchMethod):
    """ Class to fetch from perforce repositories """
    def supports(self, ud, d):
        """ Check to see if a given url can be fetched with perforce. """
        return ud.type in ['p4']

    def urldata_init(self, ud, d):
        """
        Initialize perforce specific variables within url data.  If P4CONFIG is
        provided by the env, use it.  If P4PORT is specified by the recipe, use
        its values, which may override the settings in P4CONFIG.
        """
        ud.basecmd = d.getVar('FETCHCMD_p4')
        if not ud.basecmd:
            ud.basecmd = "/usr/bin/env p4"

        ud.dldir = d.getVar('P4DIR')
        if not ud.dldir:
            ud.dldir = '%s/%s' % (d.getVar('DL_DIR'), 'p4')

        path = ud.url.split('://')[1]
        path = path.split(';')[0]
        delim = path.find('@');
        if delim != -1:
            (ud.user, ud.pswd) = path.split('@')[0].split(':')
            ud.path = path.split('@')[1]
        else:
            ud.path = path

        ud.usingp4config = False
        p4port = d.getVar('P4PORT')

        if p4port:
            logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port)
            ud.host = p4port
        else:
            logger.debug(1, 'Trying to use P4CONFIG to automatically set P4PORT...')
            ud.usingp4config = True
            p4cmd = '%s info | grep "Server address"' % ud.basecmd
            bb.fetch2.check_network_access(d, p4cmd, ud.url)
            ud.host = runfetchcmd(p4cmd, d, True)
            ud.host = ud.host.split(': ')[1].strip()
            logger.debug(1, 'Determined P4PORT to be: %s' % ud.host)
            if not ud.host:
                raise FetchError('Could not determine P4PORT from P4CONFIG')

        if ud.path.find('/...') >= 0:
            ud.pathisdir = True
        else:
            ud.pathisdir = False

        cleanedpath = ud.path.replace('/...', '').replace('/', '.')
        cleanedhost = ud.host.replace(':', '.')
        ud.pkgdir = os.path.join(ud.dldir, cleanedhost, cleanedpath)

        ud.setup_revisions(d)

        ud.localfile = d.expand('%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, ud.revision))

    def _buildp4command(self, ud, d, command, depot_filename=None):
        """
        Build a p4 commandline.  Valid commands are "changes", "print", and
        "files".  depot_filename is the full path to the file in the depot
        including the trailing '#rev' value.
        """
        p4opt = ""

        if ud.user:
            p4opt += ' -u "%s"' % (ud.user)

        if ud.pswd:
            p4opt += ' -P "%s"' % (ud.pswd)

        if ud.host and not ud.usingp4config:
            p4opt += ' -p %s' % (ud.host)

        if hasattr(ud, 'revision') and ud.revision:
            pathnrev = '%s@%s' % (ud.path, ud.revision)
        else:
            pathnrev = '%s' % (ud.path)

        if depot_filename:
            if ud.pathisdir: # Remove leading path to obtain filename
                filename = depot_filename[len(ud.path)-1:]
            else:
                filename = depot_filename[depot_filename.rfind('/'):]
            filename = filename[:filename.find('#')] # Remove trailing '#rev'

        if command == 'changes':
            p4cmd = '%s%s changes -m 1 //%s' % (ud.basecmd, p4opt, pathnrev)
        elif command == 'print':
            if depot_filename != None:
                p4cmd = '%s%s print -o "p4/%s" "%s"' % (ud.basecmd, p4opt, filename, depot_filename)
            else:
                raise FetchError('No depot file name provided to p4 %s' % command, ud.url)
        elif command == 'files':
            p4cmd = '%s%s files //%s' % (ud.basecmd, p4opt, pathnrev)
        else:
            raise FetchError('Invalid p4 command %s' % command, ud.url)

        return p4cmd

    def _p4listfiles(self, ud, d):
        """
        Return a list of the file names which are present in the depot using the
        'p4 files' command, including trailing '#rev' file revision indicator
        """
        p4cmd = self._buildp4command(ud, d, 'files')
        bb.fetch2.check_network_access(d, p4cmd, ud.url)
        p4fileslist = runfetchcmd(p4cmd, d, True)
        p4fileslist = [f.rstrip() for f in p4fileslist.splitlines()]

        if not p4fileslist:
            raise FetchError('Unable to fetch listing of p4 files from %s@%s' % (ud.host, ud.path))

        count = 0
        filelist = []

        for filename in p4fileslist:
            item = filename.split(' - ')
            lastaction = item[1].split()
            logger.debug(1, 'File: %s Last Action: %s' % (item[0], lastaction[0]))
            if lastaction[0] == 'delete':
                continue
            filelist.append(item[0])

        return filelist

    def download(self, ud, d):
        """ Get the list of files, fetch each one """
        filelist = self._p4listfiles(ud, d)
        if not filelist:
            raise FetchError('No files found in depot %s@%s' % (ud.host, ud.path))

        bb.utils.remove(ud.pkgdir, True)
        bb.utils.mkdirhier(ud.pkgdir)

        for afile in filelist:
            p4fetchcmd = self._buildp4command(ud, d, 'print', afile)
            bb.fetch2.check_network_access(d, p4fetchcmd, ud.url)
            runfetchcmd(p4fetchcmd, d, workdir=ud.pkgdir)

        runfetchcmd('tar -czf %s p4' % (ud.localpath), d, cleanup=[ud.localpath], workdir=ud.pkgdir)

    def clean(self, ud, d):
        """ Cleanup p4 specific files and dirs"""
|
|
||||||
bb.utils.remove(ud.localpath)
|
|
||||||
bb.utils.remove(ud.pkgdir, True)
|
|
||||||
|
|
||||||
def supports_srcrev(self):
|
|
||||||
return True
|
|
||||||
|
|
||||||
def _revision_key(self, ud, d, name):
|
|
||||||
""" Return a unique key for the url """
|
|
||||||
return 'p4:%s' % ud.pkgdir
|
|
||||||
|
|
||||||
def _latest_revision(self, ud, d, name):
|
|
||||||
""" Return the latest upstream scm revision number """
|
|
||||||
p4cmd = self._buildp4command(ud, d, "changes")
|
|
||||||
bb.fetch2.check_network_access(d, p4cmd, ud.url)
|
|
||||||
tip = runfetchcmd(p4cmd, d, True)
|
|
||||||
|
|
||||||
if not tip:
|
|
||||||
raise FetchError('Could not determine the latest perforce changelist')
|
|
||||||
|
|
||||||
tipcset = tip.split(' ')[1]
|
|
||||||
logger.debug(1, 'p4 tip found to be changelist %s' % tipcset)
|
|
||||||
return tipcset
|
|
||||||
|
|
||||||
def sortable_revision(self, ud, d, name):
|
|
||||||
""" Return a sortable revision number """
|
|
||||||
return False, self._build_revision(ud, d)
|
|
||||||
|
|
||||||
def _build_revision(self, ud, d):
|
|
||||||
return ud.revision
|
|
||||||
|
|
||||||
|
runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
|
||||||
|
# cleanup
|
||||||
|
bb.utils.prunedir(tmpfile)
|
||||||
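For orientation, a minimal standalone sketch (not the fetcher itself) of the kind of command string _buildp4command() assembles for the 'changes' case; the p4 binary, credentials, host and depot path below are made-up example values:

def build_p4_changes_cmd(basecmd, user, pswd, host, path, revision=None):
    # Mirrors the option handling above: -u/-P/-p plus an optional @revision suffix.
    opts = ''
    if user:
        opts += ' -u "%s"' % user
    if pswd:
        opts += ' -P "%s"' % pswd
    if host:
        opts += ' -p %s' % host
    pathnrev = '%s@%s' % (path, revision) if revision else path
    return '%s%s changes -m 1 //%s' % (basecmd, opts, pathnrev)

print(build_p4_changes_cmd('/usr/bin/env p4', 'anon', 'secret',
                           'perforce.example.com:1666', 'depot/project/...', '1234'))
# -> /usr/bin/env p4 -u "anon" -P "secret" -p perforce.example.com:1666 changes -m 1 //depot/project/...@1234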
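Likewise, a small self-contained sketch of the 'p4 files' output filtering done by _p4listfiles() above; the sample output lines are invented:

sample = [
    '//depot/project/Makefile#3 - edit change 1234 (text)',
    '//depot/project/old.c#7 - delete change 1200 (text)',
]
kept = []
for line in sample:
    item = line.split(' - ')          # '<depot path>#<rev>' and '<action> change ...'
    lastaction = item[1].split()
    if lastaction[0] == 'delete':     # deleted files are skipped, as above
        continue
    kept.append(item[0])
print(kept)                           # ['//depot/project/Makefile#3']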
@@ -25,12 +25,13 @@ BitBake "Fetch" repo (git) implementation

import os
import bb
from bb.fetch2 import FetchMethod
from bb.fetch2 import runfetchcmd

class Repo(FetchMethod):
    """Class to fetch a module or modules from repo (git) repositories"""
    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with repo.
        """

@@ -50,17 +51,17 @@ class Repo(FetchMethod):

        if not ud.manifest.endswith('.xml'):
            ud.manifest += '.xml'

        ud.localfile = d.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch))

    def download(self, ud, d):
        """Fetch url"""

        if os.access(os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK):
            logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
            return

        gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
        repodir = d.getVar("REPODIR") or os.path.join(d.getVar("DL_DIR"), "repo")
        codir = os.path.join(repodir, gitsrcname, ud.manifest)

        if ud.user:

@@ -68,29 +69,30 @@ class Repo(FetchMethod):

        else:
            username = ""

        repodir = os.path.join(codir, "repo")
        bb.utils.mkdirhier(repodir)
        if not os.path.exists(os.path.join(repodir, ".repo")):
            bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
            runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d, workdir=repodir)

        bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
        runfetchcmd("repo sync", d, workdir=repodir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude='.repo' --exclude='.git'"

        # Create a cache
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d, workdir=codir)

    def supports_srcrev(self):
        return False

    def _build_revision(self, ud, d):
        return ud.manifest

    def _want_sortable_revision(self, ud, d):
        return False

The other column of these hunks shows the older fetcher API for the same code: it keeps a "from bb import data" import, the supports()/download()/_build_revision()/_want_sortable_revision() methods take an extra url/loc argument, datastore access goes through data.expand(..., d) and data.getVar(..., d, True) instead of d.expand()/d.getVar(), the checkout directory is entered with os.chdir() rather than passing workdir= to runfetchcmd(), and the tar exclude flags are written as --exclude '.repo' --exclude '.git'.
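As a rough illustration of the commands the repo fetcher ends up running, a standalone sketch; the manifest, branch and host are example assumptions, not defaults taken from the code:

manifest, branch, proto = 'default.xml', 'main', 'https'
username, host, path = '', 'android.example.com', '/platform/manifest'

init_cmd = "repo init -m %s -b %s -u %s://%s%s%s" % (manifest, branch, proto, username, host, path)
sync_cmd = "repo sync"

print(init_cmd)   # repo init -m default.xml -b main -u https://android.example.com/platform/manifest
print(sync_cmd)   # run inside the per-manifest checkout directory, then the result is tarred up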
@@ -1,98 +0,0 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake 'Fetch' implementation for Amazon AWS S3.

Class for fetching files from Amazon S3 using the AWS Command Line Interface.
The aws tool must be correctly installed and configured prior to use.

"""

# Copyright (C) 2017, Andre McCurdy <armccurdy@gmail.com>
#
# Based in part on bb.fetch2.wget:
#    Copyright (C) 2003, 2004 Chris Larson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import bb
import urllib.request, urllib.parse, urllib.error
from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import runfetchcmd

class S3(FetchMethod):
    """Class to fetch urls via 'aws s3'"""

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with s3.
        """
        return ud.type in ['s3']

    def recommends_checksum(self, urldata):
        return True

    def urldata_init(self, ud, d):
        if 'downloadfilename' in ud.parm:
            ud.basename = ud.parm['downloadfilename']
        else:
            ud.basename = os.path.basename(ud.path)

        ud.localfile = d.expand(urllib.parse.unquote(ud.basename))

        ud.basecmd = d.getVar("FETCHCMD_s3") or "/usr/bin/env aws s3"

    def download(self, ud, d):
        """
        Fetch urls
        Assumes localpath was called first
        """

        cmd = '%s cp s3://%s%s %s' % (ud.basecmd, ud.host, ud.path, ud.localpath)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        runfetchcmd(cmd, d)

        # Additional sanity checks copied from the wget class (although there
        # are no known issues which mean these are required, treat the aws cli
        # tool with a little healthy suspicion).

        if not os.path.exists(ud.localpath):
            raise FetchError("The aws cp command returned success for s3://%s%s but %s doesn't exist?!" % (ud.host, ud.path, ud.localpath))

        if os.path.getsize(ud.localpath) == 0:
            os.remove(ud.localpath)
            raise FetchError("The aws cp command for s3://%s%s resulted in a zero size file?! Deleting and failing since this isn't right." % (ud.host, ud.path))

        return True

    def checkstatus(self, fetch, ud, d):
        """
        Check the status of a URL
        """

        cmd = '%s ls s3://%s%s' % (ud.basecmd, ud.host, ud.path)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        output = runfetchcmd(cmd, d)

        # "aws s3 ls s3://mybucket/foo" will exit with success even if the file
        # is not found, so check output of the command to confirm success.

        if not output:
            raise FetchError("The aws ls command for s3://%s%s gave empty output" % (ud.host, ud.path))

        return True
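Two small standalone sketches of how this fetcher behaves; the bucket, key and local path are hypothetical examples, not values from the code. First, the aws invocation that download() builds from an s3://<bucket>/<key> URL:

basecmd = '/usr/bin/env aws s3'                     # the FETCHCMD_s3 fallback above
host, path = 'my-bucket', '/downloads/file.tar.gz'  # as split out of SRC_URI = "s3://my-bucket/downloads/file.tar.gz"
localpath = '/tmp/downloads/file.tar.gz'
print('%s cp s3://%s%s %s' % (basecmd, host, path, localpath))
# -> /usr/bin/env aws s3 cp s3://my-bucket/downloads/file.tar.gz /tmp/downloads/file.tar.gz

Second, the subtlety the checkstatus() comment points out: 'aws s3 ls' can succeed without finding anything, so presence has to be judged from the output rather than the exit status:

import subprocess

proc = subprocess.run(['aws', 's3', 'ls', 's3://my-bucket/downloads/file.tar.gz'],
                      capture_output=True, text=True)
# An empty listing means "not found" even when the command itself succeeded.
exists = proc.returncode == 0 and bool(proc.stdout.strip())
print(exists)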
@@ -1,125 +0,0 @@
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake SFTP Fetch implementation

Class for fetching files via SFTP. It tries to adhere to the (now
expired) IETF Internet Draft for "Uniform Resource Identifier (URI)
Scheme for Secure File Transfer Protocol (SFTP) and Secure Shell
(SSH)" (SECSH URI).

It uses SFTP (so as to adhere to the SECSH URI specification). It only
supports key based authentication, not password. This class, unlike
the SSH fetcher, does not support fetching a directory tree from the
remote.

http://tools.ietf.org/html/draft-ietf-secsh-scp-sftp-ssh-uri-04
https://www.iana.org/assignments/uri-schemes/prov/sftp
https://tools.ietf.org/html/draft-ietf-secsh-filexfer-13

Please note that '/' is used as the host path separator, and not ":"
as you may be used to from the scp/sftp commands. You can use a
~ (tilde) to specify a path relative to your home directory.
(The /~user/ syntax, for specifying a path relative to another
user's home directory, is not supported.) Note that the tilde must
still follow the host path separator ("/"). See the examples below.

Example SRC_URIs:

SRC_URI = "sftp://host.example.com/dir/path.file.txt"

A path relative to your home directory.

SRC_URI = "sftp://host.example.com/~/dir/path.file.txt"

You can also specify a username (specifying a password in the
URI is not supported; use SSH keys to authenticate):

SRC_URI = "sftp://user@host.example.com/dir/path.file.txt"

"""

# Copyright (C) 2013, Olof Johansson <olof.johansson@axis.com>
#
# Based in part on bb.fetch2.wget:
#    Copyright (C) 2003, 2004 Chris Larson
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig

import os
import bb
import urllib.request, urllib.parse, urllib.error
from bb.fetch2 import URI
from bb.fetch2 import FetchMethod
from bb.fetch2 import runfetchcmd

class SFTP(FetchMethod):
    """Class to fetch urls via 'sftp'"""

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with sftp.
        """
        return ud.type in ['sftp']

    def recommends_checksum(self, urldata):
        return True

    def urldata_init(self, ud, d):
        if 'protocol' in ud.parm and ud.parm['protocol'] == 'git':
            raise bb.fetch2.ParameterError(
                "Invalid protocol - if you wish to fetch from a " +
                "git repository using ssh, you need to use the " +
                "git:// prefix with protocol=ssh", ud.url)

        if 'downloadfilename' in ud.parm:
            ud.basename = ud.parm['downloadfilename']
        else:
            ud.basename = os.path.basename(ud.path)

        ud.localfile = d.expand(urllib.parse.unquote(ud.basename))

    def download(self, ud, d):
        """Fetch urls"""

        urlo = URI(ud.url)
        basecmd = 'sftp -oBatchMode=yes'
        port = ''
        if urlo.port:
            port = '-P %d' % urlo.port
            urlo.port = None

        dldir = d.getVar('DL_DIR')
        lpath = os.path.join(dldir, ud.localfile)

        user = ''
        if urlo.userinfo:
            user = urlo.userinfo + '@'

        path = urlo.path

        # Support URIs relative to the user's home directory, with
        # the tilde syntax. (E.g. <sftp://example.com/~/foo.diff>).
        if path[:3] == '/~/':
            path = path[3:]

        remote = '%s%s:%s' % (user, urlo.hostname, path)

        cmd = '%s %s %s %s' % (basecmd, port, remote, lpath)

        bb.fetch2.check_network_access(d, cmd, ud.url)
        runfetchcmd(cmd, d)
        return True
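A rough standalone sketch of the command line download() assembles for a SECSH-style URL, using urllib.parse in place of bb.fetch2.URI; the URL and target path are assumptions for illustration:

from urllib.parse import urlparse

url = 'sftp://user@host.example.com:2222/~/dir/path.file.txt'
u = urlparse(url)

port = '-P %d' % u.port if u.port else ''
user = u.username + '@' if u.username else ''
path = u.path[3:] if u.path.startswith('/~/') else u.path   # home-relative path, as described above

cmd = 'sftp -oBatchMode=yes %s %s%s:%s %s' % (port, user, u.hostname, path, '/downloads/path.file.txt')
print(cmd)
# -> sftp -oBatchMode=yes -P 2222 user@host.example.com:dir/path.file.txt /downloads/path.file.txt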
Some files were not shown because too many files have changed in this diff.