Compare commits
666 Commits
Oldest commit in range: e8c3fe7881 (4 years ago). Newest commit in range: 8ed467911b (2 days ago).
@@ -1,11 +0,0 @@
#!/usr/bin/env python

import ci_lib

batches = [
    [
        'aws ecr-public get-login-password | docker login --username AWS --password-stdin public.ecr.aws',
    ]
]

ci_lib.run_batches(batches)
@@ -1,22 +0,0 @@

parameters:
  name: ''
  pool: ''
  sign: false

steps:
- task: UsePythonVersion@0
  displayName: Install python
  inputs:
    versionSpec: '$(python.version)'
  condition: ne(variables['python.version'], '')

- script: python -mpip install tox
  displayName: Install tooling

- script: python -mtox -e "$(tox.env)"
  displayName: "Run tests"
  env:
    AWS_ACCESS_KEY_ID: $(AWS_ACCESS_KEY_ID)
    AWS_SECRET_ACCESS_KEY: $(AWS_SECRET_ACCESS_KEY)
    AWS_DEFAULT_REGION: $(AWS_DEFAULT_REGION)
@@ -1,261 +0,0 @@
# Python package
# Create and test a Python package on multiple Python versions.
# Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more:
# https://docs.microsoft.com/azure/devops/pipelines/languages/python

# User defined variables are also injected as environment variables
# https://docs.microsoft.com/en-us/azure/devops/pipelines/process/variables#environment-variables
#variables:
  #ANSIBLE_VERBOSITY: 3

jobs:
- job: Mac1015
  # vanilla Ansible is really slow
  timeoutInMinutes: 120
  steps:
  - template: azure-pipelines-steps.yml
  pool:
    # https://github.com/actions/virtual-environments/blob/main/images/macos/macos-10.15-Readme.md
    vmImage: macOS-10.15
  strategy:
    matrix:
      Mito_27:
        python.version: '2.7'
        tox.env: py27-mode_mitogen
      Mito_36:
        python.version: '3.6'
        tox.env: py36-mode_mitogen
      Mito_39:
        python.version: '3.9'
        tox.env: py39-mode_mitogen

      # TODO: test python3, python3 tests are broken
      Loc_27_210:
        python.version: '2.7'
        tox.env: py27-mode_localhost-ansible2.10
      Loc_27_3:
        python.version: '2.7'
        tox.env: py27-mode_localhost-ansible3
      Loc_27_4:
        python.version: '2.7'
        tox.env: py27-mode_localhost-ansible4

      # NOTE: this hangs when ran in Ubuntu 18.04
      Van_27_210:
        python.version: '2.7'
        tox.env: py27-mode_localhost-ansible2.10
        STRATEGY: linear
        ANSIBLE_SKIP_TAGS: resource_intensive
      Van_27_3:
        python.version: '2.7'
        tox.env: py27-mode_localhost-ansible3
        STRATEGY: linear
        ANSIBLE_SKIP_TAGS: resource_intensive
      Van_27_4:
        python.version: '2.7'
        tox.env: py27-mode_localhost-ansible4
        STRATEGY: linear
        ANSIBLE_SKIP_TAGS: resource_intensive

- job: Mac11
  # vanilla Ansible is really slow
  timeoutInMinutes: 120
  steps:
  - template: azure-pipelines-steps.yml
  pool:
    # https://github.com/actions/virtual-environments/blob/main/images/macos/
    vmImage: macOS-11
  strategy:
    matrix:
      Mito_27:
        tox.env: py27-mode_mitogen
      Mito_37:
        python.version: '3.7'
        tox.env: py37-mode_mitogen
      Mito_39:
        python.version: '3.9'
        tox.env: py39-mode_mitogen

      # TODO: test python3, python3 tests are broken
      Loc_27_210:
        tox.env: py27-mode_localhost-ansible2.10
      Loc_27_3:
        tox.env: py27-mode_localhost-ansible3
      Loc_27_4:
        tox.env: py27-mode_localhost-ansible4

      # NOTE: this hangs when ran in Ubuntu 18.04
      Van_27_210:
        tox.env: py27-mode_localhost-ansible2.10
        STRATEGY: linear
        ANSIBLE_SKIP_TAGS: resource_intensive
      Van_27_3:
        tox.env: py27-mode_localhost-ansible3
        STRATEGY: linear
        ANSIBLE_SKIP_TAGS: resource_intensive
      Van_27_4:
        tox.env: py27-mode_localhost-ansible4
        STRATEGY: linear
        ANSIBLE_SKIP_TAGS: resource_intensive

- job: Linux
  pool:
    # https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu1804-README.md
    vmImage: "Ubuntu 18.04"
  steps:
  - template: azure-pipelines-steps.yml
  strategy:
    matrix:
      Mito_27_centos6:
        python.version: '2.7'
        tox.env: py27-mode_mitogen-distro_centos6
      Mito_27_centos7:
        python.version: '2.7'
        tox.env: py27-mode_mitogen-distro_centos7
      Mito_27_centos8:
        python.version: '2.7'
        tox.env: py27-mode_mitogen-distro_centos8
      Mito_27_debian9:
        python.version: '2.7'
        tox.env: py27-mode_mitogen-distro_debian9
      Mito_27_debian10:
        python.version: '2.7'
        tox.env: py27-mode_mitogen-distro_debian10
      Mito_27_debian11:
        python.version: '2.7'
        tox.env: py27-mode_mitogen-distro_debian11
      Mito_27_ubuntu1604:
        python.version: '2.7'
        tox.env: py27-mode_mitogen-distro_ubuntu1604
      Mito_27_ubuntu1804:
        python.version: '2.7'
        tox.env: py27-mode_mitogen-distro_ubuntu1804
      Mito_27_ubuntu2004:
        python.version: '2.7'
        tox.env: py27-mode_mitogen-distro_ubuntu2004

      Mito_36_centos6:
        python.version: '3.6'
        tox.env: py36-mode_mitogen-distro_centos6
      Mito_36_centos7:
        python.version: '3.6'
        tox.env: py36-mode_mitogen-distro_centos7
      Mito_36_centos8:
        python.version: '3.6'
        tox.env: py36-mode_mitogen-distro_centos8
      Mito_36_debian9:
        python.version: '3.6'
        tox.env: py36-mode_mitogen-distro_debian9
      Mito_36_debian10:
        python.version: '3.6'
        tox.env: py36-mode_mitogen-distro_debian10
      Mito_36_debian11:
        python.version: '3.6'
        tox.env: py36-mode_mitogen-distro_debian11
      Mito_36_ubuntu1604:
        python.version: '3.6'
        tox.env: py36-mode_mitogen-distro_ubuntu1604
      Mito_36_ubuntu1804:
        python.version: '3.6'
        tox.env: py36-mode_mitogen-distro_ubuntu1804
      Mito_36_ubuntu2004:
        python.version: '3.6'
        tox.env: py36-mode_mitogen-distro_ubuntu2004

      Mito_39_centos6:
        python.version: '3.9'
        tox.env: py39-mode_mitogen-distro_centos6
      Mito_39_centos7:
        python.version: '3.9'
        tox.env: py39-mode_mitogen-distro_centos7
      Mito_39_centos8:
        python.version: '3.9'
        tox.env: py39-mode_mitogen-distro_centos8
      Mito_39_debian9:
        python.version: '3.9'
        tox.env: py39-mode_mitogen-distro_debian9
      Mito_39_debian10:
        python.version: '3.9'
        tox.env: py39-mode_mitogen-distro_debian10
      Mito_39_debian11:
        python.version: '3.9'
        tox.env: py39-mode_mitogen-distro_debian11
      Mito_39_ubuntu1604:
        python.version: '3.9'
        tox.env: py39-mode_mitogen-distro_ubuntu1604
      Mito_39_ubuntu1804:
        python.version: '3.9'
        tox.env: py39-mode_mitogen-distro_ubuntu1804
      Mito_39_ubuntu2004:
        python.version: '3.9'
        tox.env: py39-mode_mitogen-distro_ubuntu2004

      #DebOps_2460_27_27:
        #python.version: '2.7'
        #MODE: debops_common
        #VER: 2.4.6.0

      #DebOps_262_36_27:
        #python.version: '3.6'
        #MODE: debops_common
        #VER: 2.6.2

      #Ansible_2460_26:
        #python.version: '2.7'
        #MODE: ansible
        #VER: 2.4.6.0

      #Ansible_262_26:
        #python.version: '2.7'
        #MODE: ansible
        #VER: 2.6.2

      #Ansible_2460_36:
        #python.version: '3.6'
        #MODE: ansible
        #VER: 2.4.6.0

      #Ansible_262_36:
        #python.version: '3.6'
        #MODE: ansible
        #VER: 2.6.2

      #Vanilla_262_27:
        #python.version: '2.7'
        #MODE: ansible
        #VER: 2.6.2
        #DISTROS: debian
        #STRATEGY: linear

      Ans_27_210:
        python.version: '2.7'
        tox.env: py27-mode_ansible-ansible2.10
      Ans_27_3:
        python.version: '2.7'
        tox.env: py27-mode_ansible-ansible3
      Ans_27_4:
        python.version: '2.7'
        tox.env: py27-mode_ansible-ansible4

      Ans_36_210:
        python.version: '3.6'
        tox.env: py36-mode_ansible-ansible2.10
      Ans_36_3:
        python.version: '3.6'
        tox.env: py36-mode_ansible-ansible3
      Ans_36_4:
        python.version: '3.6'
        tox.env: py36-mode_ansible-ansible4

      Ans_39_210:
        python.version: '3.9'
        tox.env: py39-mode_ansible-ansible2.10
      Ans_39_3:
        python.version: '3.9'
        tox.env: py39-mode_ansible-ansible3
      Ans_39_4:
        python.version: '3.9'
        tox.env: py39-mode_ansible-ansible4
      Ans_39_5:
        python.version: '3.9'
        tox.env: py39-mode_ansible-ansible5
@@ -0,0 +1,13 @@
# shellcheck shell=bash

# Tox environment name -> Python executable name (e.g. py312-m_mtg -> python3.12)
toxenv-python() {
    local pattern='^py([23])([0-9]{1,2}).*'
    if [[ $1 =~ $pattern ]]; then
        echo "python${BASH_REMATCH[1]}.${BASH_REMATCH[2]}"
        return
    else
        echo "${FUNCNAME[0]}: $1: environment name not recognised" >&2
        return 1
    fi
}
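A minimal sketch of how this helper behaves, using environment names that appear in the workflow matrices elsewhere in this diff (the exact shell session is illustrative, not part of the change):

```bash
# Exercise toxenv-python from an interactive shell.
source .ci/bash_functions

toxenv-python py27-m_mtg         # prints: python2.7
toxenv-python py314-m_ans-ans13  # prints: python3.14
toxenv-python lint               # no pyXY prefix: prints an error and returns 1
```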
@@ -0,0 +1,18 @@
#!/usr/bin/env bash

set -o errexit
set -o nounset
set -o pipefail

VERSION="$1"

curl \
    --fail \
    --location \
    --no-progress-meter \
    --remote-name \
    "https://downloads.sourceforge.net/project/sshpass/sshpass/${VERSION}/sshpass-${VERSION}.tar.gz"
tar xvf "sshpass-${VERSION}.tar.gz"
cd "sshpass-${VERSION}"
./configure
sudo make install
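The script takes a single sshpass release number as its argument; in the workflow below it is fed from `matrix.sshpass_version`. A hedged example invocation (the version shown is a placeholder, not necessarily what the CI matrix pins):

```bash
# Illustrative only: download, build, and install one sshpass release.
.ci/install_sshpass 1.06
```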
@@ -1,8 +0,0 @@
#!/usr/bin/env python

import ci_lib

batches = [
]

ci_lib.run_batches(batches)
@@ -1,14 +0,0 @@
#!/usr/bin/env python

import ci_lib

batches = [
]

if ci_lib.have_docker():
    batches.append([
        'aws ecr-public get-login-password | docker login --username AWS --password-stdin public.ecr.aws',
    ])


ci_lib.run_batches(batches)
@@ -0,0 +1,33 @@
#!/usr/bin/env bash
set -o errexit -o nounset -o pipefail

INDENT="    "
POSSIBLE_PYTHONS=(
    python
    python2
    python3
    /usr/bin/python
    /usr/bin/python2
    /usr/bin/python3
    # GitHub macOS 12 images: python2.7 is installed, but not on $PATH
    /Library/Frameworks/Python.framework/Versions/2.7/bin/python2.7
)

for p in "${POSSIBLE_PYTHONS[@]}"; do
    echo "$p"
    if [[ ${p:0:1} == "/" && -e $p ]]; then
        :
    elif type "$p" > /dev/null 2>&1; then
        type "$p" 2>&1 | sed -e "s/^/${INDENT}type: /"
    else
        echo "${INDENT}Not present"
        echo
        continue
    fi

    $p -c "import sys; print('${INDENT}version: %d.%d.%d' % sys.version_info[:3])"
    # macOS builders lack a realpath command
    $p -c "import os.path; print('${INDENT}realpath: %s' % os.path.realpath('$(type -p "$p")'))"
    $p -c "import sys; print('${INDENT}sys.executable: %s' % sys.executable)"
    echo
done
@@ -1,33 +0,0 @@
---
name: Mitogen 0.2.x bug report
about: Report a bug in Mitogen 0.2.x (for Ansible 2.5, 2.6, 2.7, 2.8, or 2.9)
title: ''
labels: affects-0.2, bug
assignees: ''

---

Please drag-drop large logs as text file attachments.

Feel free to write an issue in your preferred format, however if in doubt, use
the following checklist as a guide for what to include.

* Which version of Ansible are you running?
* Is your version of Ansible patched in any way?
* Are you running with any custom modules, or `module_utils` loaded?

* Have you tried the latest master version from Git?
* Do you have some idea of what the underlying problem may be?
  https://mitogen.networkgenomics.com/ansible_detailed.html#common-problems has
  instructions to help figure out the likely cause and how to gather relevant
  logs.
* Mention your host and target OS and versions
* Mention your host and target Python versions
* If reporting a performance issue, mention the number of targets and a rough
  description of your workload (lots of copies, lots of tiny file edits, etc.)
* If reporting a crash or hang in Ansible, please rerun with -vvv and include
  200 lines of output around the point of the error, along with a full copy of
  any traceback or error text in the log. Beware "-vvv" may include secret
  data! Edit as necessary before posting.
* If reporting any kind of problem with Ansible, please include the Ansible
  version along with output of "ansible-config dump --only-changed".
@@ -1,33 +0,0 @@
---
name: Mitogen 0.3.x bug report
about: Report a bug in Mitogen 0.3.x (for Ansible 2.10.x)
title: ''
labels: affects-0.3, bug
assignees: ''

---

Please drag-drop large logs as text file attachments.

Feel free to write an issue in your preferred format, however if in doubt, use
the following checklist as a guide for what to include.

* Which version of Ansible are you running?
* Is your version of Ansible patched in any way?
* Are you running with any custom modules, or `module_utils` loaded?

* Have you tried the latest master version from Git?
* Do you have some idea of what the underlying problem may be?
  https://mitogen.networkgenomics.com/ansible_detailed.html#common-problems has
  instructions to help figure out the likely cause and how to gather relevant
  logs.
* Mention your host and target OS and versions
* Mention your host and target Python versions
* If reporting a performance issue, mention the number of targets and a rough
  description of your workload (lots of copies, lots of tiny file edits, etc.)
* If reporting a crash or hang in Ansible, please rerun with -vvv and include
  200 lines of output around the point of the error, along with a full copy of
  any traceback or error text in the log. Beware "-vvv" may include secret
  data! Edit as necessary before posting.
* If reporting any kind of problem with Ansible, please include the Ansible
  version along with output of "ansible-config dump --only-changed".
@@ -0,0 +1,62 @@
name: Bug report
description: Report a bug in Mitogen 0.3.x (for Ansible 2.10 and above)
labels:
  - affects-0.3
type: bug

body:
  - type: textarea
    attributes:
      label: Description
      description: >
        When does the problem occur?
        What happens after?
        How is this different?
        Did it previously behave as expected?
      placeholder: |
        When I do X, Y happens, but I was expecting Z because ...
        Before version 1.2.3 it worked as expected.
    validations:
      required: true

  - type: input
    attributes:
      label: Mitogen version
      placeholder: 0.3.31, 0.3.3-9+deb12u1
    validations:
      required: true

  - type: input
    attributes:
      label: Ansible version (if applicable)
      placeholder: 2.18.11

  - type: textarea
    attributes:
      label: OS and environment
      description: >
        What operating system version(s), Python version(s), etc. are you using?
      placeholder: |
        Controller (master): Debian 13, Python 3.14
        Targets (slaves): Ubuntu 20.04/Python 2.7, RHEL 10, ...

  - type: textarea
    attributes:
      label: Steps to reproduce
      description: >
        Instructions, code, or playbook(s) to recreate the behaviour
      value: |
        Steps:
        1. Set config `foo = 42` in somefile.cfg
        2. Run the following Python or Playbook with `cmd --option bar ...`

        ```
        Code or playbook here
        ```

  - type: textarea
    attributes:
      label: Anything else
      description: >
        Include any other details you think might be relevant or helpful.
        Examples might include logs, unusual settings, environment variables, ...
@@ -0,0 +1,211 @@
# https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions

name: Tests

# env:
#   ANSIBLE_VERBOSITY: 3
#   MITOGEN_LOG_LEVEL: DEBUG

on:
  pull_request:
  push:
    branches-ignore:
      - docs-master

# https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners/about-github-hosted-runners
# https://github.com/actions/runner-images/blob/main/README.md#software-and-image-support
jobs:
  u2204:
    name: u2204 ${{ matrix.tox_env }}
    # https://github.com/actions/runner-images/blob/main/images/ubuntu/Ubuntu2204-Readme.md
    runs-on: ubuntu-22.04
    timeout-minutes: 25

    strategy:
      fail-fast: false
      matrix:
        include:
          - tox_env: py27-m_ans-ans2.10
          - tox_env: py27-m_ans-ans4

          - tox_env: py36-m_ans-ans2.10
          - tox_env: py36-m_ans-ans4

          - tox_env: py27-m_mtg
          - tox_env: py36-m_mtg

    steps:
      - uses: actions/checkout@v4
      - uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - run: .ci/show_python_versions
      - name: Install deps
        id: install-deps
        run: |
          set -o errexit -o nounset -o pipefail
          source .ci/bash_functions
          PYTHON="$(toxenv-python '${{ matrix.tox_env }}')"

          sudo apt-get update

          if [[ $PYTHON == "python2.7" ]]; then
            sudo apt install -y python2-dev sshpass virtualenv
            curl "https://bootstrap.pypa.io/pip/2.7/get-pip.py" --output "get-pip.py"
            "$PYTHON" get-pip.py --user --no-python-version-warning
            # Avoid Python 2.x pip masking system pip
            rm -f ~/.local/bin/{easy_install,pip,wheel}
          elif [[ $PYTHON == "python3.6" ]]; then
            sudo apt install -y gcc-10 make libbz2-dev liblzma-dev libreadline-dev libsqlite3-dev libssl-dev sshpass virtualenv zlib1g-dev
            curl --fail --silent --show-error --location https://pyenv.run | bash
            CC=gcc-10 ~/.pyenv/bin/pyenv install --force 3.6
            PYTHON="$HOME/.pyenv/versions/3.6.15/bin/python3.6"
          fi

          "$PYTHON" -m pip install -r "tests/requirements-tox.txt"
          echo "python=$PYTHON" >> $GITHUB_OUTPUT
      - name: Run tests
        env:
          GITHUB_ACTOR: ${{ github.actor }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          set -o errexit -o nounset -o pipefail
          PYTHON="${{ steps.install-deps.outputs.python }}"

          "$PYTHON" -m tox -e "${{ matrix.tox_env }}"

  u2404:
    name: u2404 ${{ matrix.tox_env }}
    # https://github.com/actions/runner-images/blob/main/images/ubuntu/Ubuntu2404-Readme.md
    runs-on: ubuntu-24.04
    timeout-minutes: 25

    strategy:
      fail-fast: false
      matrix:
        include:
          - tox_env: py311-m_ans-ans2.10
            python_version: '3.11'
          - tox_env: py311-m_ans-ans3
            python_version: '3.11'
          - tox_env: py311-m_ans-ans4
            python_version: '3.11'
          - tox_env: py311-m_ans-ans5
            python_version: '3.11'
          - tox_env: py313-m_ans-ans6
            python_version: '3.13'
          - tox_env: py313-m_ans-ans7
            python_version: '3.13'
          - tox_env: py313-m_ans-ans8
            python_version: '3.13'
          - tox_env: py314-m_ans-ans9
            python_version: '3.14'
          - tox_env: py314-m_ans-ans10
            python_version: '3.14'
          - tox_env: py314-m_ans-ans11
            python_version: '3.14'
          - tox_env: py314-m_ans-ans12
            python_version: '3.14'
          - tox_env: py314-m_ans-ans13
            python_version: '3.14'

          - tox_env: py314-m_ans-ans13-s_lin
            python_version: '3.14'

          - tox_env: py314-m_mtg
            python_version: '3.14'

    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python_version }}
        if: ${{ matrix.python_version }}
      - uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - run: .ci/show_python_versions
      - name: Install deps
        id: install-deps
        run: |
          set -o errexit -o nounset -o pipefail
          source .ci/bash_functions
          PYTHON="$(toxenv-python '${{ matrix.tox_env }}')"

          sudo apt-get update
          sudo apt-get install -y sshpass virtualenv

          "$PYTHON" -m pip install -r "tests/requirements-tox.txt"
          echo "python=$PYTHON" >> $GITHUB_OUTPUT
      - name: Run tests
        env:
          GITHUB_ACTOR: ${{ github.actor }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          set -o errexit -o nounset -o pipefail
          PYTHON="${{ steps.install-deps.outputs.python }}"

          "$PYTHON" -m tox -e "${{ matrix.tox_env }}"

  macos:
    name: macos ${{ matrix.tox_env }}
    # https://github.com/actions/runner-images/blob/main/images/macos/macos-15-Readme.md
    runs-on: macos-15
    timeout-minutes: 15

    strategy:
      fail-fast: false
      matrix:
        include:
          - tox_env: py314-m_lcl-ans13
            python_version: '3.14'
          - tox_env: py314-m_lcl-ans13-s_lin
            python_version: '3.14'

          - tox_env: py314-m_mtg
            python_version: '3.14'

    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python_version }}
        if: ${{ matrix.python_version }}
      - run: .ci/show_python_versions
      - run: .ci/install_sshpass ${{ matrix.sshpass_version }}
        if: ${{ matrix.sshpass_version }}
      - name: Install deps
        id: install-deps
        run: |
          set -o errexit -o nounset -o pipefail
          source .ci/bash_functions
          PYTHON="$(toxenv-python '${{ matrix.tox_env }}')"

          "$PYTHON" -m pip install -r "tests/requirements-tox.txt"
          echo "python=$PYTHON" >> $GITHUB_OUTPUT
      - name: Run tests
        env:
          GITHUB_ACTOR: ${{ github.actor }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          set -o errexit -o nounset -o pipefail
          PYTHON="${{ steps.install-deps.outputs.python }}"

          "$PYTHON" -m tox -e "${{ matrix.tox_env }}"

  # https://github.com/marketplace/actions/alls-green
  check:
    if: always()
    needs:
      - u2204
      - u2404
      - macos
    runs-on: ubuntu-latest
    steps:
      - uses: re-actors/alls-green@release/v1
        with:
          jobs: ${{ toJSON(needs) }}
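Each matrix job above boils down to installing the tox requirements and running a single tox environment. A rough local equivalent of one entry, assuming a suitable interpreter is already on PATH and skipping the CI-only steps (ghcr.io login, setup-python):

```bash
# Reproduce the py314-m_mtg job locally (illustrative sketch).
python3.14 -m pip install -r tests/requirements-tox.txt
python3.14 -m tox -e py314-m_mtg
```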
@@ -1,10 +0,0 @@
path_classifiers:
  library:
    - "mitogen/compat"
    - "ansible_mitogen/compat"
queries:
  # Mitogen 2.4 compatibility trips this query everywhere, so just disable it
  - exclude: py/unreachable-statement
  - exclude: py/should-use-with
  # mitogen.core.b() trips this query everywhere, so just disable it
  - exclude: py/import-and-import-from
@ -1,318 +0,0 @@
|
|||||||
r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
|
|
||||||
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
|
|
||||||
interchange format.
|
|
||||||
|
|
||||||
:mod:`simplejson` exposes an API familiar to users of the standard library
|
|
||||||
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
|
|
||||||
version of the :mod:`json` library contained in Python 2.6, but maintains
|
|
||||||
compatibility with Python 2.4 and Python 2.5 and (currently) has
|
|
||||||
significant performance advantages, even without using the optional C
|
|
||||||
extension for speedups.
|
|
||||||
|
|
||||||
Encoding basic Python object hierarchies::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
|
|
||||||
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
|
|
||||||
>>> print json.dumps("\"foo\bar")
|
|
||||||
"\"foo\bar"
|
|
||||||
>>> print json.dumps(u'\u1234')
|
|
||||||
"\u1234"
|
|
||||||
>>> print json.dumps('\\')
|
|
||||||
"\\"
|
|
||||||
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
|
|
||||||
{"a": 0, "b": 0, "c": 0}
|
|
||||||
>>> from StringIO import StringIO
|
|
||||||
>>> io = StringIO()
|
|
||||||
>>> json.dump(['streaming API'], io)
|
|
||||||
>>> io.getvalue()
|
|
||||||
'["streaming API"]'
|
|
||||||
|
|
||||||
Compact encoding::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
|
|
||||||
'[1,2,3,{"4":5,"6":7}]'
|
|
||||||
|
|
||||||
Pretty printing::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
|
|
||||||
>>> print '\n'.join([l.rstrip() for l in s.splitlines()])
|
|
||||||
{
|
|
||||||
"4": 5,
|
|
||||||
"6": 7
|
|
||||||
}
|
|
||||||
|
|
||||||
Decoding JSON::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
|
|
||||||
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
|
|
||||||
True
|
|
||||||
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
|
|
||||||
True
|
|
||||||
>>> from StringIO import StringIO
|
|
||||||
>>> io = StringIO('["streaming API"]')
|
|
||||||
>>> json.load(io)[0] == 'streaming API'
|
|
||||||
True
|
|
||||||
|
|
||||||
Specializing JSON object decoding::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> def as_complex(dct):
|
|
||||||
... if '__complex__' in dct:
|
|
||||||
... return complex(dct['real'], dct['imag'])
|
|
||||||
... return dct
|
|
||||||
...
|
|
||||||
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
|
|
||||||
... object_hook=as_complex)
|
|
||||||
(1+2j)
|
|
||||||
>>> import decimal
|
|
||||||
>>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1')
|
|
||||||
True
|
|
||||||
|
|
||||||
Specializing JSON object encoding::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> def encode_complex(obj):
|
|
||||||
... if isinstance(obj, complex):
|
|
||||||
... return [obj.real, obj.imag]
|
|
||||||
... raise TypeError(repr(o) + " is not JSON serializable")
|
|
||||||
...
|
|
||||||
>>> json.dumps(2 + 1j, default=encode_complex)
|
|
||||||
'[2.0, 1.0]'
|
|
||||||
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
|
|
||||||
'[2.0, 1.0]'
|
|
||||||
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
|
|
||||||
'[2.0, 1.0]'
|
|
||||||
|
|
||||||
|
|
||||||
Using simplejson.tool from the shell to validate and pretty-print::
|
|
||||||
|
|
||||||
$ echo '{"json":"obj"}' | python -m simplejson.tool
|
|
||||||
{
|
|
||||||
"json": "obj"
|
|
||||||
}
|
|
||||||
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
|
|
||||||
Expecting property name: line 1 column 2 (char 2)
|
|
||||||
"""
|
|
||||||
__version__ = '2.0.9'
|
|
||||||
__all__ = [
|
|
||||||
'dump', 'dumps', 'load', 'loads',
|
|
||||||
'JSONDecoder', 'JSONEncoder',
|
|
||||||
]
|
|
||||||
|
|
||||||
__author__ = 'Bob Ippolito <bob@redivi.com>'
|
|
||||||
|
|
||||||
from decoder import JSONDecoder
|
|
||||||
from encoder import JSONEncoder
|
|
||||||
|
|
||||||
_default_encoder = JSONEncoder(
|
|
||||||
skipkeys=False,
|
|
||||||
ensure_ascii=True,
|
|
||||||
check_circular=True,
|
|
||||||
allow_nan=True,
|
|
||||||
indent=None,
|
|
||||||
separators=None,
|
|
||||||
encoding='utf-8',
|
|
||||||
default=None,
|
|
||||||
)
|
|
||||||
|
|
||||||
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
|
|
||||||
allow_nan=True, cls=None, indent=None, separators=None,
|
|
||||||
encoding='utf-8', default=None, **kw):
|
|
||||||
"""Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
|
|
||||||
``.write()``-supporting file-like object).
|
|
||||||
|
|
||||||
If ``skipkeys`` is true then ``dict`` keys that are not basic types
|
|
||||||
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
|
|
||||||
will be skipped instead of raising a ``TypeError``.
|
|
||||||
|
|
||||||
If ``ensure_ascii`` is false, then the some chunks written to ``fp``
|
|
||||||
may be ``unicode`` instances, subject to normal Python ``str`` to
|
|
||||||
``unicode`` coercion rules. Unless ``fp.write()`` explicitly
|
|
||||||
understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
|
|
||||||
to cause an error.
|
|
||||||
|
|
||||||
If ``check_circular`` is false, then the circular reference check
|
|
||||||
for container types will be skipped and a circular reference will
|
|
||||||
result in an ``OverflowError`` (or worse).
|
|
||||||
|
|
||||||
If ``allow_nan`` is false, then it will be a ``ValueError`` to
|
|
||||||
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
|
|
||||||
in strict compliance of the JSON specification, instead of using the
|
|
||||||
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
|
|
||||||
|
|
||||||
If ``indent`` is a non-negative integer, then JSON array elements and object
|
|
||||||
members will be pretty-printed with that indent level. An indent level
|
|
||||||
of 0 will only insert newlines. ``None`` is the most compact representation.
|
|
||||||
|
|
||||||
If ``separators`` is an ``(item_separator, dict_separator)`` tuple
|
|
||||||
then it will be used instead of the default ``(', ', ': ')`` separators.
|
|
||||||
``(',', ':')`` is the most compact JSON representation.
|
|
||||||
|
|
||||||
``encoding`` is the character encoding for str instances, default is UTF-8.
|
|
||||||
|
|
||||||
``default(obj)`` is a function that should return a serializable version
|
|
||||||
of obj or raise TypeError. The default simply raises TypeError.
|
|
||||||
|
|
||||||
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
|
|
||||||
``.default()`` method to serialize additional types), specify it with
|
|
||||||
the ``cls`` kwarg.
|
|
||||||
|
|
||||||
"""
|
|
||||||
# cached encoder
|
|
||||||
if (not skipkeys and ensure_ascii and
|
|
||||||
check_circular and allow_nan and
|
|
||||||
cls is None and indent is None and separators is None and
|
|
||||||
encoding == 'utf-8' and default is None and not kw):
|
|
||||||
iterable = _default_encoder.iterencode(obj)
|
|
||||||
else:
|
|
||||||
if cls is None:
|
|
||||||
cls = JSONEncoder
|
|
||||||
iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
|
|
||||||
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
|
|
||||||
separators=separators, encoding=encoding,
|
|
||||||
default=default, **kw).iterencode(obj)
|
|
||||||
# could accelerate with writelines in some versions of Python, at
|
|
||||||
# a debuggability cost
|
|
||||||
for chunk in iterable:
|
|
||||||
fp.write(chunk)
|
|
||||||
|
|
||||||
|
|
||||||
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
|
|
||||||
allow_nan=True, cls=None, indent=None, separators=None,
|
|
||||||
encoding='utf-8', default=None, **kw):
|
|
||||||
"""Serialize ``obj`` to a JSON formatted ``str``.
|
|
||||||
|
|
||||||
If ``skipkeys`` is false then ``dict`` keys that are not basic types
|
|
||||||
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
|
|
||||||
will be skipped instead of raising a ``TypeError``.
|
|
||||||
|
|
||||||
If ``ensure_ascii`` is false, then the return value will be a
|
|
||||||
``unicode`` instance subject to normal Python ``str`` to ``unicode``
|
|
||||||
coercion rules instead of being escaped to an ASCII ``str``.
|
|
||||||
|
|
||||||
If ``check_circular`` is false, then the circular reference check
|
|
||||||
for container types will be skipped and a circular reference will
|
|
||||||
result in an ``OverflowError`` (or worse).
|
|
||||||
|
|
||||||
If ``allow_nan`` is false, then it will be a ``ValueError`` to
|
|
||||||
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
|
|
||||||
strict compliance of the JSON specification, instead of using the
|
|
||||||
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
|
|
||||||
|
|
||||||
If ``indent`` is a non-negative integer, then JSON array elements and
|
|
||||||
object members will be pretty-printed with that indent level. An indent
|
|
||||||
level of 0 will only insert newlines. ``None`` is the most compact
|
|
||||||
representation.
|
|
||||||
|
|
||||||
If ``separators`` is an ``(item_separator, dict_separator)`` tuple
|
|
||||||
then it will be used instead of the default ``(', ', ': ')`` separators.
|
|
||||||
``(',', ':')`` is the most compact JSON representation.
|
|
||||||
|
|
||||||
``encoding`` is the character encoding for str instances, default is UTF-8.
|
|
||||||
|
|
||||||
``default(obj)`` is a function that should return a serializable version
|
|
||||||
of obj or raise TypeError. The default simply raises TypeError.
|
|
||||||
|
|
||||||
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
|
|
||||||
``.default()`` method to serialize additional types), specify it with
|
|
||||||
the ``cls`` kwarg.
|
|
||||||
|
|
||||||
"""
|
|
||||||
# cached encoder
|
|
||||||
if (not skipkeys and ensure_ascii and
|
|
||||||
check_circular and allow_nan and
|
|
||||||
cls is None and indent is None and separators is None and
|
|
||||||
encoding == 'utf-8' and default is None and not kw):
|
|
||||||
return _default_encoder.encode(obj)
|
|
||||||
if cls is None:
|
|
||||||
cls = JSONEncoder
|
|
||||||
return cls(
|
|
||||||
skipkeys=skipkeys, ensure_ascii=ensure_ascii,
|
|
||||||
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
|
|
||||||
separators=separators, encoding=encoding, default=default,
|
|
||||||
**kw).encode(obj)
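# Illustrative usage of the ``separators`` and ``indent`` options described
# above (not part of the original source):
#
#     >>> dumps([1, 2, {"a": True}], separators=(',', ':'))
#     '[1,2,{"a":true}]'
#     >>> dumps({"a": 1}, indent=2)
#     '{\n  "a": 1\n}'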
|
|
||||||
|
|
||||||
|
|
||||||
_default_decoder = JSONDecoder(encoding=None, object_hook=None)
|
|
||||||
|
|
||||||
|
|
||||||
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
|
|
||||||
parse_int=None, parse_constant=None, **kw):
|
|
||||||
"""Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
|
|
||||||
a JSON document) to a Python object.
|
|
||||||
|
|
||||||
If the contents of ``fp`` is encoded with an ASCII based encoding other
|
|
||||||
than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must
|
|
||||||
be specified. Encodings that are not ASCII based (such as UCS-2) are
|
|
||||||
not allowed, and should be wrapped with
|
|
||||||
``codecs.getreader(encoding)(fp)``, or simply decoded to a ``unicode``
|
|
||||||
object and passed to ``loads()``
|
|
||||||
|
|
||||||
``object_hook`` is an optional function that will be called with the
|
|
||||||
result of any object literal decode (a ``dict``). The return value of
|
|
||||||
``object_hook`` will be used instead of the ``dict``. This feature
|
|
||||||
can be used to implement custom decoders (e.g. JSON-RPC class hinting).
|
|
||||||
|
|
||||||
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
|
|
||||||
kwarg.
|
|
||||||
|
|
||||||
"""
|
|
||||||
return loads(fp.read(),
|
|
||||||
encoding=encoding, cls=cls, object_hook=object_hook,
|
|
||||||
parse_float=parse_float, parse_int=parse_int,
|
|
||||||
parse_constant=parse_constant, **kw)
|
|
||||||
|
|
||||||
|
|
||||||
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
|
|
||||||
parse_int=None, parse_constant=None, **kw):
|
|
||||||
"""Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
|
|
||||||
document) to a Python object.
|
|
||||||
|
|
||||||
If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
|
|
||||||
other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
|
|
||||||
must be specified. Encodings that are not ASCII based (such as UCS-2)
|
|
||||||
are not allowed and should be decoded to ``unicode`` first.
|
|
||||||
|
|
||||||
``object_hook`` is an optional function that will be called with the
|
|
||||||
result of any object literal decode (a ``dict``). The return value of
|
|
||||||
``object_hook`` will be used instead of the ``dict``. This feature
|
|
||||||
can be used to implement custom decoders (e.g. JSON-RPC class hinting).
|
|
||||||
|
|
||||||
``parse_float``, if specified, will be called with the string
|
|
||||||
of every JSON float to be decoded. By default this is equivalent to
|
|
||||||
float(num_str). This can be used to use another datatype or parser
|
|
||||||
for JSON floats (e.g. decimal.Decimal).
|
|
||||||
|
|
||||||
``parse_int``, if specified, will be called with the string
|
|
||||||
of every JSON int to be decoded. By default this is equivalent to
|
|
||||||
int(num_str). This can be used to use another datatype or parser
|
|
||||||
for JSON integers (e.g. float).
|
|
||||||
|
|
||||||
``parse_constant``, if specified, will be called with one of the
|
|
||||||
following strings: -Infinity, Infinity, NaN.
|
|
||||||
This can be used to raise an exception if invalid JSON numbers
|
|
||||||
are encountered.
|
|
||||||
|
|
||||||
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
|
|
||||||
kwarg.
|
|
||||||
|
|
||||||
"""
|
|
||||||
if (cls is None and encoding is None and object_hook is None and
|
|
||||||
parse_int is None and parse_float is None and
|
|
||||||
parse_constant is None and not kw):
|
|
||||||
return _default_decoder.decode(s)
|
|
||||||
if cls is None:
|
|
||||||
cls = JSONDecoder
|
|
||||||
if object_hook is not None:
|
|
||||||
kw['object_hook'] = object_hook
|
|
||||||
if parse_float is not None:
|
|
||||||
kw['parse_float'] = parse_float
|
|
||||||
if parse_int is not None:
|
|
||||||
kw['parse_int'] = parse_int
|
|
||||||
if parse_constant is not None:
|
|
||||||
kw['parse_constant'] = parse_constant
|
|
||||||
return cls(encoding=encoding, **kw).decode(s)
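# Illustrative usage of ``parse_float`` and ``object_hook`` (not part of the
# original source):
#
#     >>> import decimal
#     >>> loads('{"num": 1.5}', parse_float=decimal.Decimal)
#     {u'num': Decimal('1.5')}
#     >>> loads('{"x": 1}', object_hook=lambda d: sorted(d.keys()))
#     [u'x']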
|
|
||||||
@@ -1,354 +0,0 @@
|
|||||||
"""Implementation of JSONDecoder
|
|
||||||
"""
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
import struct
|
|
||||||
|
|
||||||
from simplejson.scanner import make_scanner
|
|
||||||
try:
|
|
||||||
from simplejson._speedups import scanstring as c_scanstring
|
|
||||||
except ImportError:
|
|
||||||
c_scanstring = None
|
|
||||||
|
|
||||||
__all__ = ['JSONDecoder']
|
|
||||||
|
|
||||||
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
|
|
||||||
|
|
||||||
def _floatconstants():
|
|
||||||
_BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
|
|
||||||
if sys.byteorder != 'big':
|
|
||||||
_BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
|
|
||||||
nan, inf = struct.unpack('dd', _BYTES)
|
|
||||||
return nan, inf, -inf
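# '7FF8000000000000' and '7FF0000000000000' are the big-endian IEEE-754 bit
# patterns for a quiet NaN and positive infinity; the slice reversal above
# swaps each 8-byte double into little-endian order where necessary.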
|
|
||||||
|
|
||||||
NaN, PosInf, NegInf = _floatconstants()
|
|
||||||
|
|
||||||
|
|
||||||
def linecol(doc, pos):
|
|
||||||
lineno = doc.count('\n', 0, pos) + 1
|
|
||||||
if lineno == 1:
|
|
||||||
colno = pos
|
|
||||||
else:
|
|
||||||
colno = pos - doc.rindex('\n', 0, pos)
|
|
||||||
return lineno, colno
|
|
||||||
|
|
||||||
|
|
||||||
def errmsg(msg, doc, pos, end=None):
|
|
||||||
# Note that this function is called from _speedups
|
|
||||||
lineno, colno = linecol(doc, pos)
|
|
||||||
if end is None:
|
|
||||||
#fmt = '{0}: line {1} column {2} (char {3})'
|
|
||||||
#return fmt.format(msg, lineno, colno, pos)
|
|
||||||
fmt = '%s: line %d column %d (char %d)'
|
|
||||||
return fmt % (msg, lineno, colno, pos)
|
|
||||||
endlineno, endcolno = linecol(doc, end)
|
|
||||||
#fmt = '{0}: line {1} column {2} - line {3} column {4} (char {5} - {6})'
|
|
||||||
#return fmt.format(msg, lineno, colno, endlineno, endcolno, pos, end)
|
|
||||||
fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
|
|
||||||
return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end)
|
|
||||||
|
|
||||||
|
|
||||||
_CONSTANTS = {
|
|
||||||
'-Infinity': NegInf,
|
|
||||||
'Infinity': PosInf,
|
|
||||||
'NaN': NaN,
|
|
||||||
}
|
|
||||||
|
|
||||||
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
|
|
||||||
BACKSLASH = {
|
|
||||||
'"': u'"', '\\': u'\\', '/': u'/',
|
|
||||||
'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
|
|
||||||
}
|
|
||||||
|
|
||||||
DEFAULT_ENCODING = "utf-8"
|
|
||||||
|
|
||||||
def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
|
|
||||||
"""Scan the string s for a JSON string. End is the index of the
|
|
||||||
character in s after the quote that started the JSON string.
|
|
||||||
Unescapes all valid JSON string escape sequences and raises ValueError
|
|
||||||
on attempt to decode an invalid string. If strict is False then literal
|
|
||||||
control characters are allowed in the string.
|
|
||||||
|
|
||||||
Returns a tuple of the decoded string and the index of the character in s
|
|
||||||
after the end quote."""
|
|
||||||
if encoding is None:
|
|
||||||
encoding = DEFAULT_ENCODING
|
|
||||||
chunks = []
|
|
||||||
_append = chunks.append
|
|
||||||
begin = end - 1
|
|
||||||
while 1:
|
|
||||||
chunk = _m(s, end)
|
|
||||||
if chunk is None:
|
|
||||||
raise ValueError(
|
|
||||||
errmsg("Unterminated string starting at", s, begin))
|
|
||||||
end = chunk.end()
|
|
||||||
content, terminator = chunk.groups()
|
|
||||||
# Content contains zero or more unescaped string characters
|
|
||||||
if content:
|
|
||||||
if not isinstance(content, unicode):
|
|
||||||
content = unicode(content, encoding)
|
|
||||||
_append(content)
|
|
||||||
# Terminator is the end of string, a literal control character,
|
|
||||||
# or a backslash denoting that an escape sequence follows
|
|
||||||
if terminator == '"':
|
|
||||||
break
|
|
||||||
elif terminator != '\\':
|
|
||||||
if strict:
|
|
||||||
msg = "Invalid control character %r at" % (terminator,)
|
|
||||||
#msg = "Invalid control character {0!r} at".format(terminator)
|
|
||||||
raise ValueError(errmsg(msg, s, end))
|
|
||||||
else:
|
|
||||||
_append(terminator)
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
esc = s[end]
|
|
||||||
except IndexError:
|
|
||||||
raise ValueError(
|
|
||||||
errmsg("Unterminated string starting at", s, begin))
|
|
||||||
# If not a unicode escape sequence, must be in the lookup table
|
|
||||||
if esc != 'u':
|
|
||||||
try:
|
|
||||||
char = _b[esc]
|
|
||||||
except KeyError:
|
|
||||||
msg = "Invalid \\escape: " + repr(esc)
|
|
||||||
raise ValueError(errmsg(msg, s, end))
|
|
||||||
end += 1
|
|
||||||
else:
|
|
||||||
# Unicode escape sequence
|
|
||||||
esc = s[end + 1:end + 5]
|
|
||||||
next_end = end + 5
|
|
||||||
if len(esc) != 4:
|
|
||||||
msg = "Invalid \\uXXXX escape"
|
|
||||||
raise ValueError(errmsg(msg, s, end))
|
|
||||||
uni = int(esc, 16)
|
|
||||||
# Check for surrogate pair on UCS-4 systems
|
|
||||||
if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
|
|
||||||
msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
|
|
||||||
if not s[end + 5:end + 7] == '\\u':
|
|
||||||
raise ValueError(errmsg(msg, s, end))
|
|
||||||
esc2 = s[end + 7:end + 11]
|
|
||||||
if len(esc2) != 4:
|
|
||||||
raise ValueError(errmsg(msg, s, end))
|
|
||||||
uni2 = int(esc2, 16)
|
|
||||||
uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
|
|
||||||
next_end += 6
|
|
||||||
char = unichr(uni)
|
|
||||||
end = next_end
|
|
||||||
# Append the unescaped character
|
|
||||||
_append(char)
|
|
||||||
return u''.join(chunks), end
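# For example (illustrative; the index argument points just past the opening
# quote):
#
#     >>> py_scanstring('"hello" and more', 1)
#     (u'hello', 7)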
|
|
||||||
|
|
||||||
|
|
||||||
# Use speedup if available
|
|
||||||
scanstring = c_scanstring or py_scanstring
|
|
||||||
|
|
||||||
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
|
|
||||||
WHITESPACE_STR = ' \t\n\r'
|
|
||||||
|
|
||||||
def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
|
|
||||||
pairs = {}
|
|
||||||
# Use a slice to prevent IndexError from being raised, the following
|
|
||||||
# check will raise a more specific ValueError if the string is empty
|
|
||||||
nextchar = s[end:end + 1]
|
|
||||||
# Normally we expect nextchar == '"'
|
|
||||||
if nextchar != '"':
|
|
||||||
if nextchar in _ws:
|
|
||||||
end = _w(s, end).end()
|
|
||||||
nextchar = s[end:end + 1]
|
|
||||||
# Trivial empty object
|
|
||||||
if nextchar == '}':
|
|
||||||
return pairs, end + 1
|
|
||||||
elif nextchar != '"':
|
|
||||||
raise ValueError(errmsg("Expecting property name", s, end))
|
|
||||||
end += 1
|
|
||||||
while True:
|
|
||||||
key, end = scanstring(s, end, encoding, strict)
|
|
||||||
|
|
||||||
# To skip some function call overhead we optimize the fast paths where
|
|
||||||
# the JSON key separator is ": " or just ":".
|
|
||||||
if s[end:end + 1] != ':':
|
|
||||||
end = _w(s, end).end()
|
|
||||||
if s[end:end + 1] != ':':
|
|
||||||
raise ValueError(errmsg("Expecting : delimiter", s, end))
|
|
||||||
|
|
||||||
end += 1
|
|
||||||
|
|
||||||
try:
|
|
||||||
if s[end] in _ws:
|
|
||||||
end += 1
|
|
||||||
if s[end] in _ws:
|
|
||||||
end = _w(s, end + 1).end()
|
|
||||||
except IndexError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
try:
|
|
||||||
value, end = scan_once(s, end)
|
|
||||||
except StopIteration:
|
|
||||||
raise ValueError(errmsg("Expecting object", s, end))
|
|
||||||
pairs[key] = value
|
|
||||||
|
|
||||||
try:
|
|
||||||
nextchar = s[end]
|
|
||||||
if nextchar in _ws:
|
|
||||||
end = _w(s, end + 1).end()
|
|
||||||
nextchar = s[end]
|
|
||||||
except IndexError:
|
|
||||||
nextchar = ''
|
|
||||||
end += 1
|
|
||||||
|
|
||||||
if nextchar == '}':
|
|
||||||
break
|
|
||||||
elif nextchar != ',':
|
|
||||||
raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
|
|
||||||
|
|
||||||
try:
|
|
||||||
nextchar = s[end]
|
|
||||||
if nextchar in _ws:
|
|
||||||
end += 1
|
|
||||||
nextchar = s[end]
|
|
||||||
if nextchar in _ws:
|
|
||||||
end = _w(s, end + 1).end()
|
|
||||||
nextchar = s[end]
|
|
||||||
except IndexError:
|
|
||||||
nextchar = ''
|
|
||||||
|
|
||||||
end += 1
|
|
||||||
if nextchar != '"':
|
|
||||||
raise ValueError(errmsg("Expecting property name", s, end - 1))
|
|
||||||
|
|
||||||
if object_hook is not None:
|
|
||||||
pairs = object_hook(pairs)
|
|
||||||
return pairs, end
|
|
||||||
|
|
||||||
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
|
|
||||||
values = []
|
|
||||||
nextchar = s[end:end + 1]
|
|
||||||
if nextchar in _ws:
|
|
||||||
end = _w(s, end + 1).end()
|
|
||||||
nextchar = s[end:end + 1]
|
|
||||||
# Look-ahead for trivial empty array
|
|
||||||
if nextchar == ']':
|
|
||||||
return values, end + 1
|
|
||||||
_append = values.append
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
value, end = scan_once(s, end)
|
|
||||||
except StopIteration:
|
|
||||||
raise ValueError(errmsg("Expecting object", s, end))
|
|
||||||
_append(value)
|
|
||||||
nextchar = s[end:end + 1]
|
|
||||||
if nextchar in _ws:
|
|
||||||
end = _w(s, end + 1).end()
|
|
||||||
nextchar = s[end:end + 1]
|
|
||||||
end += 1
|
|
||||||
if nextchar == ']':
|
|
||||||
break
|
|
||||||
elif nextchar != ',':
|
|
||||||
raise ValueError(errmsg("Expecting , delimiter", s, end))
|
|
||||||
|
|
||||||
try:
|
|
||||||
if s[end] in _ws:
|
|
||||||
end += 1
|
|
||||||
if s[end] in _ws:
|
|
||||||
end = _w(s, end + 1).end()
|
|
||||||
except IndexError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
return values, end
|
|
||||||
|
|
||||||
class JSONDecoder(object):
|
|
||||||
"""Simple JSON <http://json.org> decoder
|
|
||||||
|
|
||||||
Performs the following translations in decoding by default:
|
|
||||||
|
|
||||||
+---------------+-------------------+
|
|
||||||
| JSON | Python |
|
|
||||||
+===============+===================+
|
|
||||||
| object | dict |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| array | list |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| string | unicode |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| number (int) | int, long |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| number (real) | float |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| true | True |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| false | False |
|
|
||||||
+---------------+-------------------+
|
|
||||||
| null | None |
|
|
||||||
+---------------+-------------------+
|
|
||||||
|
|
||||||
It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
|
|
||||||
their corresponding ``float`` values, which is outside the JSON spec.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, encoding=None, object_hook=None, parse_float=None,
|
|
||||||
parse_int=None, parse_constant=None, strict=True):
|
|
||||||
"""``encoding`` determines the encoding used to interpret any ``str``
|
|
||||||
objects decoded by this instance (utf-8 by default). It has no
|
|
||||||
effect when decoding ``unicode`` objects.
|
|
||||||
|
|
||||||
Note that currently only encodings that are a superset of ASCII work;
|
|
||||||
strings of other encodings should be passed in as ``unicode``.
|
|
||||||
|
|
||||||
``object_hook``, if specified, will be called with the result
|
|
||||||
of every JSON object decoded and its return value will be used in
|
|
||||||
place of the given ``dict``. This can be used to provide custom
|
|
||||||
deserializations (e.g. to support JSON-RPC class hinting).
|
|
||||||
|
|
||||||
``parse_float``, if specified, will be called with the string
|
|
||||||
of every JSON float to be decoded. By default this is equivalent to
|
|
||||||
float(num_str). This can be used to use another datatype or parser
|
|
||||||
for JSON floats (e.g. decimal.Decimal).
|
|
||||||
|
|
||||||
``parse_int``, if specified, will be called with the string
|
|
||||||
of every JSON int to be decoded. By default this is equivalent to
|
|
||||||
int(num_str). This can be used to use another datatype or parser
|
|
||||||
for JSON integers (e.g. float).
|
|
||||||
|
|
||||||
``parse_constant``, if specified, will be called with one of the
|
|
||||||
following strings: -Infinity, Infinity, NaN.
|
|
||||||
This can be used to raise an exception if invalid JSON numbers
|
|
||||||
are encountered.
|
|
||||||
|
|
||||||
"""
|
|
||||||
self.encoding = encoding
|
|
||||||
self.object_hook = object_hook
|
|
||||||
self.parse_float = parse_float or float
|
|
||||||
self.parse_int = parse_int or int
|
|
||||||
self.parse_constant = parse_constant or _CONSTANTS.__getitem__
|
|
||||||
self.strict = strict
|
|
||||||
self.parse_object = JSONObject
|
|
||||||
self.parse_array = JSONArray
|
|
||||||
self.parse_string = scanstring
|
|
||||||
self.scan_once = make_scanner(self)
|
|
||||||
|
|
||||||
def decode(self, s, _w=WHITESPACE.match):
|
|
||||||
"""Return the Python representation of ``s`` (a ``str`` or ``unicode``
|
|
||||||
instance containing a JSON document)
|
|
||||||
|
|
||||||
"""
|
|
||||||
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
|
|
||||||
end = _w(s, end).end()
|
|
||||||
if end != len(s):
|
|
||||||
raise ValueError(errmsg("Extra data", s, end, len(s)))
|
|
||||||
return obj
|
|
||||||
|
|
||||||
def raw_decode(self, s, idx=0):
|
|
||||||
"""Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
|
|
||||||
with a JSON document) and return a 2-tuple of the Python
|
|
||||||
representation and the index in ``s`` where the document ended.
|
|
||||||
|
|
||||||
This can be used to decode a JSON document from a string that may
|
|
||||||
have extraneous data at the end.
|
|
||||||
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
obj, end = self.scan_once(s, idx)
|
|
||||||
except StopIteration:
|
|
||||||
raise ValueError("No JSON object could be decoded")
|
|
||||||
return obj, end
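# Illustrative contrast with decode() (not part of the original source):
# decode() raises "Extra data" on trailing input, while raw_decode() reports
# where the document ended.
#
#     >>> JSONDecoder().raw_decode('{"a": 1} trailing garbage')
#     ({u'a': 1}, 8)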
|
|
||||||
@@ -1,440 +0,0 @@
|
|||||||
"""Implementation of JSONEncoder
|
|
||||||
"""
|
|
||||||
import re
|
|
||||||
|
|
||||||
try:
|
|
||||||
from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
|
|
||||||
except ImportError:
|
|
||||||
c_encode_basestring_ascii = None
|
|
||||||
try:
|
|
||||||
from simplejson._speedups import make_encoder as c_make_encoder
|
|
||||||
except ImportError:
|
|
||||||
c_make_encoder = None
|
|
||||||
|
|
||||||
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
|
|
||||||
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
|
|
||||||
HAS_UTF8 = re.compile(r'[\x80-\xff]')
|
|
||||||
ESCAPE_DCT = {
|
|
||||||
'\\': '\\\\',
|
|
||||||
'"': '\\"',
|
|
||||||
'\b': '\\b',
|
|
||||||
'\f': '\\f',
|
|
||||||
'\n': '\\n',
|
|
||||||
'\r': '\\r',
|
|
||||||
'\t': '\\t',
|
|
||||||
}
|
|
||||||
for i in range(0x20):
|
|
||||||
#ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
|
|
||||||
ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
|
|
||||||
|
|
||||||
# Assume this produces an infinity on all machines (probably not guaranteed)
|
|
||||||
INFINITY = float('1e66666')
|
|
||||||
FLOAT_REPR = repr
|
|
||||||
|
|
||||||
def encode_basestring(s):
|
|
||||||
"""Return a JSON representation of a Python string
|
|
||||||
|
|
||||||
"""
|
|
||||||
def replace(match):
|
|
||||||
return ESCAPE_DCT[match.group(0)]
|
|
||||||
return '"' + ESCAPE.sub(replace, s) + '"'
|
|
||||||
|
|
||||||
|
|
||||||
def py_encode_basestring_ascii(s):
|
|
||||||
"""Return an ASCII-only JSON representation of a Python string
|
|
||||||
|
|
||||||
"""
|
|
||||||
if isinstance(s, str) and HAS_UTF8.search(s) is not None:
|
|
||||||
s = s.decode('utf-8')
|
|
||||||
def replace(match):
|
|
||||||
s = match.group(0)
|
|
||||||
try:
|
|
||||||
return ESCAPE_DCT[s]
|
|
||||||
except KeyError:
|
|
||||||
n = ord(s)
|
|
||||||
if n < 0x10000:
|
|
||||||
#return '\\u{0:04x}'.format(n)
|
|
||||||
return '\\u%04x' % (n,)
|
|
||||||
else:
|
|
||||||
# surrogate pair
|
|
||||||
n -= 0x10000
|
|
||||||
s1 = 0xd800 | ((n >> 10) & 0x3ff)
|
|
||||||
s2 = 0xdc00 | (n & 0x3ff)
|
|
||||||
#return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
|
|
||||||
return '\\u%04x\\u%04x' % (s1, s2)
|
|
||||||
return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
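# For example (illustrative):
#
#     >>> py_encode_basestring_ascii(u'caf\xe9')
#     '"caf\\u00e9"'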
|
|
||||||
|
|
||||||
|
|
||||||
encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii
|
|
||||||
|
|
||||||
class JSONEncoder(object):
|
|
||||||
"""Extensible JSON <http://json.org> encoder for Python data structures.
|
|
||||||
|
|
||||||
Supports the following objects and types by default:
|
|
||||||
|
|
||||||
+-------------------+---------------+
|
|
||||||
| Python | JSON |
|
|
||||||
+===================+===============+
|
|
||||||
| dict | object |
|
|
||||||
+-------------------+---------------+
|
|
||||||
| list, tuple | array |
|
|
||||||
+-------------------+---------------+
|
|
||||||
| str, unicode | string |
|
|
||||||
+-------------------+---------------+
|
|
||||||
| int, long, float | number |
|
|
||||||
+-------------------+---------------+
|
|
||||||
| True | true |
|
|
||||||
+-------------------+---------------+
|
|
||||||
| False | false |
|
|
||||||
+-------------------+---------------+
|
|
||||||
| None | null |
|
|
||||||
+-------------------+---------------+
|
|
||||||
|
|
||||||
To extend this to recognize other objects, subclass and implement a
``.default()`` method that returns a serializable object for ``o`` if
possible; otherwise it should call the superclass implementation (to
raise ``TypeError``).
|
|
||||||
|
|
||||||
"""
|
|
||||||
item_separator = ', '
|
|
||||||
key_separator = ': '
|
|
||||||
def __init__(self, skipkeys=False, ensure_ascii=True,
|
|
||||||
check_circular=True, allow_nan=True, sort_keys=False,
|
|
||||||
indent=None, separators=None, encoding='utf-8', default=None):
|
|
||||||
"""Constructor for JSONEncoder, with sensible defaults.
|
|
||||||
|
|
||||||
If skipkeys is false, then it is a TypeError to attempt
|
|
||||||
encoding of keys that are not str, int, long, float or None. If
|
|
||||||
skipkeys is True, such items are simply skipped.
|
|
||||||
|
|
||||||
If ensure_ascii is true, the output is guaranteed to be str
|
|
||||||
objects with all incoming unicode characters escaped. If
|
|
||||||
ensure_ascii is false, the output will be a unicode object.
|
|
||||||
|
|
||||||
If check_circular is true, then lists, dicts, and custom encoded
|
|
||||||
objects will be checked for circular references during encoding to
|
|
||||||
prevent an infinite recursion (which would cause an OverflowError).
|
|
||||||
Otherwise, no such check takes place.
|
|
||||||
|
|
||||||
If allow_nan is true, then NaN, Infinity, and -Infinity will be
|
|
||||||
encoded as such. This behavior is not JSON specification compliant,
|
|
||||||
but is consistent with most JavaScript based encoders and decoders.
|
|
||||||
Otherwise, it will be a ValueError to encode such floats.
|
|
||||||
|
|
||||||
If sort_keys is true, then the output of dictionaries will be
|
|
||||||
sorted by key; this is useful for regression tests to ensure
|
|
||||||
that JSON serializations can be compared on a day-to-day basis.
|
|
||||||
|
|
||||||
If indent is a non-negative integer, then JSON array
|
|
||||||
elements and object members will be pretty-printed with that
|
|
||||||
indent level. An indent level of 0 will only insert newlines.
|
|
||||||
None is the most compact representation.
|
|
||||||
|
|
||||||
If specified, separators should be a (item_separator, key_separator)
|
|
||||||
tuple. The default is (', ', ': '). To get the most compact JSON
|
|
||||||
representation you should specify (',', ':') to eliminate whitespace.
|
|
||||||
|
|
||||||
If specified, default is a function that gets called for objects
|
|
||||||
that can't otherwise be serialized. It should return a JSON encodable
|
|
||||||
version of the object or raise a ``TypeError``.
|
|
||||||
|
|
||||||
If encoding is not None, then all input strings will be
|
|
||||||
transformed into unicode using that encoding prior to JSON-encoding.
|
|
||||||
The default is UTF-8.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
self.skipkeys = skipkeys
|
|
||||||
self.ensure_ascii = ensure_ascii
|
|
||||||
self.check_circular = check_circular
|
|
||||||
self.allow_nan = allow_nan
|
|
||||||
self.sort_keys = sort_keys
|
|
||||||
self.indent = indent
|
|
||||||
if separators is not None:
|
|
||||||
self.item_separator, self.key_separator = separators
|
|
||||||
if default is not None:
|
|
||||||
self.default = default
|
|
||||||
self.encoding = encoding
|
|
||||||
|
|
||||||
def default(self, o):
|
|
||||||
"""Implement this method in a subclass such that it returns
|
|
||||||
a serializable object for ``o``, or calls the base implementation
|
|
||||||
(to raise a ``TypeError``).
|
|
||||||
|
|
||||||
For example, to support arbitrary iterators, you could
|
|
||||||
implement default like this::
|
|
||||||
|
|
||||||
def default(self, o):
|
|
||||||
try:
|
|
||||||
iterable = iter(o)
|
|
||||||
except TypeError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
return list(iterable)
|
|
||||||
return JSONEncoder.default(self, o)
|
|
||||||
|
|
||||||
"""
|
|
||||||
raise TypeError(repr(o) + " is not JSON serializable")
|
|
||||||
|
|
||||||
def encode(self, o):
|
|
||||||
"""Return a JSON string representation of a Python data structure.
|
|
||||||
|
|
||||||
>>> JSONEncoder().encode({"foo": ["bar", "baz"]})
|
|
||||||
'{"foo": ["bar", "baz"]}'
|
|
||||||
|
|
||||||
"""
|
|
||||||
# This is for extremely simple cases and benchmarks.
|
|
||||||
if isinstance(o, basestring):
|
|
||||||
if isinstance(o, str):
|
|
||||||
_encoding = self.encoding
|
|
||||||
if (_encoding is not None
|
|
||||||
and not (_encoding == 'utf-8')):
|
|
||||||
o = o.decode(_encoding)
|
|
||||||
if self.ensure_ascii:
|
|
||||||
return encode_basestring_ascii(o)
|
|
||||||
else:
|
|
||||||
return encode_basestring(o)
|
|
||||||
# This doesn't pass the iterator directly to ''.join() because the
|
|
||||||
# exceptions aren't as detailed. The list call should be roughly
|
|
||||||
# equivalent to the PySequence_Fast that ''.join() would do.
|
|
||||||
chunks = self.iterencode(o, _one_shot=True)
|
|
||||||
if not isinstance(chunks, (list, tuple)):
|
|
||||||
chunks = list(chunks)
|
|
||||||
return ''.join(chunks)
|
|
||||||
|
|
||||||
def iterencode(self, o, _one_shot=False):
|
|
||||||
"""Encode the given object and yield each string
|
|
||||||
representation as available.
|
|
||||||
|
|
||||||
For example::
|
|
||||||
|
|
||||||
for chunk in JSONEncoder().iterencode(bigobject):
|
|
||||||
mysocket.write(chunk)
|
|
||||||
|
|
||||||
"""
|
|
||||||
if self.check_circular:
|
|
||||||
markers = {}
|
|
||||||
else:
|
|
||||||
markers = None
|
|
||||||
if self.ensure_ascii:
|
|
||||||
_encoder = encode_basestring_ascii
|
|
||||||
else:
|
|
||||||
_encoder = encode_basestring
|
|
||||||
if self.encoding != 'utf-8':
|
|
||||||
def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
|
|
||||||
if isinstance(o, str):
|
|
||||||
o = o.decode(_encoding)
|
|
||||||
return _orig_encoder(o)
|
|
||||||
|
|
||||||
def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
|
|
||||||
# Check for specials. Note that this type of test is processor- and/or
|
|
||||||
# platform-specific, so do tests which don't depend on the internals.
|
|
||||||
|
|
||||||
if o != o:
|
|
||||||
text = 'NaN'
|
|
||||||
elif o == _inf:
|
|
||||||
text = 'Infinity'
|
|
||||||
elif o == _neginf:
|
|
||||||
text = '-Infinity'
|
|
||||||
else:
|
|
||||||
return _repr(o)
|
|
||||||
|
|
||||||
if not allow_nan:
|
|
||||||
raise ValueError(
|
|
||||||
"Out of range float values are not JSON compliant: " +
|
|
||||||
repr(o))
|
|
||||||
|
|
||||||
return text
|
|
||||||
|
|
||||||
|
|
||||||
if _one_shot and c_make_encoder is not None and not self.indent and not self.sort_keys:
|
|
||||||
_iterencode = c_make_encoder(
|
|
||||||
markers, self.default, _encoder, self.indent,
|
|
||||||
self.key_separator, self.item_separator, self.sort_keys,
|
|
||||||
self.skipkeys, self.allow_nan)
|
|
||||||
else:
|
|
||||||
_iterencode = _make_iterencode(
|
|
||||||
markers, self.default, _encoder, self.indent, floatstr,
|
|
||||||
self.key_separator, self.item_separator, self.sort_keys,
|
|
||||||
self.skipkeys, _one_shot)
|
|
||||||
return _iterencode(o, 0)
|
|
||||||
|
|
||||||
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
|
|
||||||
## HACK: hand-optimized bytecode; turn globals into locals
|
|
||||||
False=False,
|
|
||||||
True=True,
|
|
||||||
ValueError=ValueError,
|
|
||||||
basestring=basestring,
|
|
||||||
dict=dict,
|
|
||||||
float=float,
|
|
||||||
id=id,
|
|
||||||
int=int,
|
|
||||||
isinstance=isinstance,
|
|
||||||
list=list,
|
|
||||||
long=long,
|
|
||||||
str=str,
|
|
||||||
tuple=tuple,
|
|
||||||
):
|
|
||||||
|
|
||||||
def _iterencode_list(lst, _current_indent_level):
|
|
||||||
if not lst:
|
|
||||||
yield '[]'
|
|
||||||
return
|
|
||||||
if markers is not None:
|
|
||||||
markerid = id(lst)
|
|
||||||
if markerid in markers:
|
|
||||||
raise ValueError("Circular reference detected")
|
|
||||||
markers[markerid] = lst
|
|
||||||
buf = '['
|
|
||||||
if _indent is not None:
|
|
||||||
_current_indent_level += 1
|
|
||||||
newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
|
|
||||||
separator = _item_separator + newline_indent
|
|
||||||
buf += newline_indent
|
|
||||||
else:
|
|
||||||
newline_indent = None
|
|
||||||
separator = _item_separator
|
|
||||||
first = True
|
|
||||||
for value in lst:
|
|
||||||
if first:
|
|
||||||
first = False
|
|
||||||
else:
|
|
||||||
buf = separator
|
|
||||||
if isinstance(value, basestring):
|
|
||||||
yield buf + _encoder(value)
|
|
||||||
elif value is None:
|
|
||||||
yield buf + 'null'
|
|
||||||
elif value is True:
|
|
||||||
yield buf + 'true'
|
|
||||||
elif value is False:
|
|
||||||
yield buf + 'false'
|
|
||||||
elif isinstance(value, (int, long)):
|
|
||||||
yield buf + str(value)
|
|
||||||
elif isinstance(value, float):
|
|
||||||
yield buf + _floatstr(value)
|
|
||||||
else:
|
|
||||||
yield buf
|
|
||||||
if isinstance(value, (list, tuple)):
|
|
||||||
chunks = _iterencode_list(value, _current_indent_level)
|
|
||||||
elif isinstance(value, dict):
|
|
||||||
chunks = _iterencode_dict(value, _current_indent_level)
|
|
||||||
else:
|
|
||||||
chunks = _iterencode(value, _current_indent_level)
|
|
||||||
for chunk in chunks:
|
|
||||||
yield chunk
|
|
||||||
if newline_indent is not None:
|
|
||||||
_current_indent_level -= 1
|
|
||||||
yield '\n' + (' ' * (_indent * _current_indent_level))
|
|
||||||
yield ']'
|
|
||||||
if markers is not None:
|
|
||||||
del markers[markerid]
|
|
||||||
|
|
||||||
def _iterencode_dict(dct, _current_indent_level):
|
|
||||||
if not dct:
|
|
||||||
yield '{}'
|
|
||||||
return
|
|
||||||
if markers is not None:
|
|
||||||
markerid = id(dct)
|
|
||||||
if markerid in markers:
|
|
||||||
raise ValueError("Circular reference detected")
|
|
||||||
markers[markerid] = dct
|
|
||||||
yield '{'
|
|
||||||
if _indent is not None:
|
|
||||||
_current_indent_level += 1
|
|
||||||
newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
|
|
||||||
item_separator = _item_separator + newline_indent
|
|
||||||
yield newline_indent
|
|
||||||
else:
|
|
||||||
newline_indent = None
|
|
||||||
item_separator = _item_separator
|
|
||||||
first = True
|
|
||||||
if _sort_keys:
|
|
||||||
items = dct.items()
|
|
||||||
items.sort(key=lambda kv: kv[0])
|
|
||||||
else:
|
|
||||||
items = dct.iteritems()
|
|
||||||
for key, value in items:
|
|
||||||
if isinstance(key, basestring):
|
|
||||||
pass
|
|
||||||
# JavaScript is weakly typed for these, so it makes sense to
|
|
||||||
# also allow them. Many encoders seem to do something like this.
|
|
||||||
elif isinstance(key, float):
|
|
||||||
key = _floatstr(key)
|
|
||||||
elif key is True:
|
|
||||||
key = 'true'
|
|
||||||
elif key is False:
|
|
||||||
key = 'false'
|
|
||||||
elif key is None:
|
|
||||||
key = 'null'
|
|
||||||
elif isinstance(key, (int, long)):
|
|
||||||
key = str(key)
|
|
||||||
elif _skipkeys:
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
raise TypeError("key " + repr(key) + " is not a string")
|
|
||||||
if first:
|
|
||||||
first = False
|
|
||||||
else:
|
|
||||||
yield item_separator
|
|
||||||
yield _encoder(key)
|
|
||||||
yield _key_separator
|
|
||||||
if isinstance(value, basestring):
|
|
||||||
yield _encoder(value)
|
|
||||||
elif value is None:
|
|
||||||
yield 'null'
|
|
||||||
elif value is True:
|
|
||||||
yield 'true'
|
|
||||||
elif value is False:
|
|
||||||
yield 'false'
|
|
||||||
elif isinstance(value, (int, long)):
|
|
||||||
yield str(value)
|
|
||||||
elif isinstance(value, float):
|
|
||||||
yield _floatstr(value)
|
|
||||||
else:
|
|
||||||
if isinstance(value, (list, tuple)):
|
|
||||||
chunks = _iterencode_list(value, _current_indent_level)
|
|
||||||
elif isinstance(value, dict):
|
|
||||||
chunks = _iterencode_dict(value, _current_indent_level)
|
|
||||||
else:
|
|
||||||
chunks = _iterencode(value, _current_indent_level)
|
|
||||||
for chunk in chunks:
|
|
||||||
yield chunk
|
|
||||||
if newline_indent is not None:
|
|
||||||
_current_indent_level -= 1
|
|
||||||
yield '\n' + (' ' * (_indent * _current_indent_level))
|
|
||||||
yield '}'
|
|
||||||
if markers is not None:
|
|
||||||
del markers[markerid]
|
|
||||||
|
|
||||||
def _iterencode(o, _current_indent_level):
|
|
||||||
if isinstance(o, basestring):
|
|
||||||
yield _encoder(o)
|
|
||||||
elif o is None:
|
|
||||||
yield 'null'
|
|
||||||
elif o is True:
|
|
||||||
yield 'true'
|
|
||||||
elif o is False:
|
|
||||||
yield 'false'
|
|
||||||
elif isinstance(o, (int, long)):
|
|
||||||
yield str(o)
|
|
||||||
elif isinstance(o, float):
|
|
||||||
yield _floatstr(o)
|
|
||||||
elif isinstance(o, (list, tuple)):
|
|
||||||
for chunk in _iterencode_list(o, _current_indent_level):
|
|
||||||
yield chunk
|
|
||||||
elif isinstance(o, dict):
|
|
||||||
for chunk in _iterencode_dict(o, _current_indent_level):
|
|
||||||
yield chunk
|
|
||||||
else:
|
|
||||||
if markers is not None:
|
|
||||||
markerid = id(o)
|
|
||||||
if markerid in markers:
|
|
||||||
raise ValueError("Circular reference detected")
|
|
||||||
markers[markerid] = o
|
|
||||||
o = _default(o)
|
|
||||||
for chunk in _iterencode(o, _current_indent_level):
|
|
||||||
yield chunk
|
|
||||||
if markers is not None:
|
|
||||||
del markers[markerid]
|
|
||||||
|
|
||||||
return _iterencode
|
|
||||||
@@ -1,65 +0,0 @@
|
|||||||
"""JSON token scanner
|
|
||||||
"""
|
|
||||||
import re
|
|
||||||
try:
|
|
||||||
from simplejson._speedups import make_scanner as c_make_scanner
|
|
||||||
except ImportError:
|
|
||||||
c_make_scanner = None
|
|
||||||
|
|
||||||
__all__ = ['make_scanner']
|
|
||||||
|
|
||||||
NUMBER_RE = re.compile(
|
|
||||||
r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
|
|
||||||
(re.VERBOSE | re.MULTILINE | re.DOTALL))
|
|
||||||
|
|
||||||
def py_make_scanner(context):
|
|
||||||
parse_object = context.parse_object
|
|
||||||
parse_array = context.parse_array
|
|
||||||
parse_string = context.parse_string
|
|
||||||
match_number = NUMBER_RE.match
|
|
||||||
encoding = context.encoding
|
|
||||||
strict = context.strict
|
|
||||||
parse_float = context.parse_float
|
|
||||||
parse_int = context.parse_int
|
|
||||||
parse_constant = context.parse_constant
|
|
||||||
object_hook = context.object_hook
|
|
||||||
|
|
||||||
def _scan_once(string, idx):
|
|
||||||
try:
|
|
||||||
nextchar = string[idx]
|
|
||||||
except IndexError:
|
|
||||||
raise StopIteration
|
|
||||||
|
|
||||||
if nextchar == '"':
|
|
||||||
return parse_string(string, idx + 1, encoding, strict)
|
|
||||||
elif nextchar == '{':
|
|
||||||
return parse_object((string, idx + 1), encoding, strict, _scan_once, object_hook)
|
|
||||||
elif nextchar == '[':
|
|
||||||
return parse_array((string, idx + 1), _scan_once)
|
|
||||||
elif nextchar == 'n' and string[idx:idx + 4] == 'null':
|
|
||||||
return None, idx + 4
|
|
||||||
elif nextchar == 't' and string[idx:idx + 4] == 'true':
|
|
||||||
return True, idx + 4
|
|
||||||
elif nextchar == 'f' and string[idx:idx + 5] == 'false':
|
|
||||||
return False, idx + 5
|
|
||||||
|
|
||||||
m = match_number(string, idx)
|
|
||||||
if m is not None:
|
|
||||||
integer, frac, exp = m.groups()
|
|
||||||
if frac or exp:
|
|
||||||
res = parse_float(integer + (frac or '') + (exp or ''))
|
|
||||||
else:
|
|
||||||
res = parse_int(integer)
|
|
||||||
return res, m.end()
|
|
||||||
elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
|
|
||||||
return parse_constant('NaN'), idx + 3
|
|
||||||
elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
|
|
||||||
return parse_constant('Infinity'), idx + 8
|
|
||||||
elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
|
|
||||||
return parse_constant('-Infinity'), idx + 9
|
|
||||||
else:
|
|
||||||
raise StopIteration
|
|
||||||
|
|
||||||
return _scan_once
|
|
||||||
|
|
||||||
make_scanner = c_make_scanner or py_make_scanner
|
|
||||||
@@ -0,0 +1,44 @@
|
|||||||
|
# Copyright 2022, Mitogen contributors
|
||||||
|
#
|
||||||
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
# modification, are permitted provided that the following conditions are met:
|
||||||
|
#
|
||||||
|
# 1. Redistributions of source code must retain the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer.
|
||||||
|
#
|
||||||
|
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||||
|
# this list of conditions and the following disclaimer in the documentation
|
||||||
|
# and/or other materials provided with the distribution.
|
||||||
|
#
|
||||||
|
# 3. Neither the name of the copyright holder nor the names of its contributors
|
||||||
|
# may be used to endorse or promote products derived from this software without
|
||||||
|
# specific prior written permission.
|
||||||
|
#
|
||||||
|
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||||
|
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||||
|
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||||
|
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||||
|
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||||
|
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||||
|
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||||
|
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||||
|
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||||
|
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||||
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
try:
|
||||||
|
import ansible_mitogen
|
||||||
|
except ImportError:
|
||||||
|
sys.path.insert(0, os.path.abspath(os.path.join(__file__, '../../../..')))
|
||||||
|
|
||||||
|
import ansible_mitogen.connection
|
||||||
|
|
||||||
|
|
||||||
|
class Connection(ansible_mitogen.connection.Connection):
|
||||||
|
transport = 'podman'
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
from __future__ import absolute_import
|
|
||||||
|
|
||||||
import distutils.version
|
|
||||||
|
|
||||||
import ansible
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
'ansible_version',
|
|
||||||
]
|
|
||||||
|
|
||||||
ansible_version = tuple(distutils.version.LooseVersion(ansible.__version__).version)
|
|
||||||
del distutils
|
|
||||||
del ansible
|
|
||||||
@@ -0,0 +1,29 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
import ansible
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
'ansible_version',
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def _parse(v_string):
|
||||||
|
# Adapted from distutils.version.LooseVersion.parse()
|
||||||
|
component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)
|
||||||
|
for component in component_re.split(v_string):
|
||||||
|
if not component or component == '.':
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
yield int(component)
|
||||||
|
except ValueError:
|
||||||
|
yield component
|
||||||
|
|
||||||
|
|
||||||
|
ansible_version = tuple(_parse(ansible.__version__))
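# For example, _parse('2.10.7rc1') yields 2, 10, 7, 'rc', 1, so
# ansible_version == (2, 10, 7, 'rc', 1), mirroring LooseVersion.parse().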
|
||||||
|
|
||||||
|
del _parse
|
||||||
|
del re
|
||||||
|
del ansible
|
||||||
@@ -0,0 +1,123 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import ansible
|
||||||
|
import ansible.utils.unsafe_proxy
|
||||||
|
|
||||||
|
import ansible_mitogen.utils
|
||||||
|
|
||||||
|
import mitogen
|
||||||
|
import mitogen.core
|
||||||
|
import mitogen.utils
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
'cast',
|
||||||
|
]
|
||||||
|
|
||||||
|
def _cast_to_dict(obj): return {cast(k): cast(v) for k, v in obj.items()}
|
||||||
|
def _cast_to_list(obj): return [cast(v) for v in obj]
|
||||||
|
def _cast_to_set(obj): return set(cast(v) for v in obj)
|
||||||
|
def _cast_to_tuple(obj): return tuple(cast(v) for v in obj)
|
||||||
|
def _cast_unsafe(obj): return obj._strip_unsafe()
|
||||||
|
def _passthrough(obj): return obj
|
||||||
|
def _untag(obj): return obj._native_copy()
|
||||||
|
|
||||||
|
|
||||||
|
# A dispatch table to cast objects based on their exact type.
|
||||||
|
# This is an optimisation; reliable fallbacks are required (e.g. isinstance())
|
||||||
|
_CAST_DISPATCH = {
|
||||||
|
bytes: bytes,
|
||||||
|
dict: _cast_to_dict,
|
||||||
|
list: _cast_to_list,
|
||||||
|
mitogen.core.UnicodeType: mitogen.core.UnicodeType,
|
||||||
|
}
|
||||||
|
_CAST_DISPATCH.update({t: _passthrough for t in mitogen.utils.PASSTHROUGH})
|
||||||
|
|
||||||
|
_CAST_SUBTYPES = [
|
||||||
|
dict,
|
||||||
|
list,
|
||||||
|
]
|
||||||
|
|
||||||
|
if hasattr(ansible.utils.unsafe_proxy, 'TrustedAsTemplate'):
|
||||||
|
import datetime
|
||||||
|
import ansible.module_utils._internal._datatag
|
||||||
|
_CAST_DISPATCH.update({
|
||||||
|
set: _cast_to_set,
|
||||||
|
tuple: _cast_to_tuple,
|
||||||
|
ansible.module_utils._internal._datatag._AnsibleTaggedBytes: _untag,
|
||||||
|
ansible.module_utils._internal._datatag._AnsibleTaggedDate: _untag,
|
||||||
|
ansible.module_utils._internal._datatag._AnsibleTaggedDateTime: _untag,
|
||||||
|
ansible.module_utils._internal._datatag._AnsibleTaggedDict: _cast_to_dict,
|
||||||
|
ansible.module_utils._internal._datatag._AnsibleTaggedFloat: _untag,
|
||||||
|
ansible.module_utils._internal._datatag._AnsibleTaggedInt: _untag,
|
||||||
|
ansible.module_utils._internal._datatag._AnsibleTaggedList: _cast_to_list,
|
||||||
|
ansible.module_utils._internal._datatag._AnsibleTaggedSet: _cast_to_set,
|
||||||
|
ansible.module_utils._internal._datatag._AnsibleTaggedStr: _untag,
|
||||||
|
ansible.module_utils._internal._datatag._AnsibleTaggedTime: _untag,
|
||||||
|
ansible.module_utils._internal._datatag._AnsibleTaggedTuple: _cast_to_tuple,
|
||||||
|
ansible.utils.unsafe_proxy.AnsibleUnsafeBytes: bytes,
|
||||||
|
ansible.utils.unsafe_proxy.AnsibleUnsafeText: mitogen.core.UnicodeType,
|
||||||
|
datetime.date: _passthrough,
|
||||||
|
datetime.datetime: _passthrough,
|
||||||
|
datetime.time: _passthrough,
|
||||||
|
})
|
||||||
|
_CAST_SUBTYPES.extend([
|
||||||
|
set,
|
||||||
|
tuple,
|
||||||
|
])
|
||||||
|
elif hasattr(ansible.utils.unsafe_proxy.AnsibleUnsafeText, '_strip_unsafe'):
|
||||||
|
_CAST_DISPATCH.update({
|
||||||
|
tuple: _cast_to_list,
|
||||||
|
ansible.utils.unsafe_proxy.AnsibleUnsafeBytes: _cast_unsafe,
|
||||||
|
ansible.utils.unsafe_proxy.AnsibleUnsafeText: _cast_unsafe,
|
||||||
|
ansible.utils.unsafe_proxy.NativeJinjaUnsafeText: _cast_unsafe,
|
||||||
|
})
|
||||||
|
_CAST_SUBTYPES.extend([
|
||||||
|
tuple,
|
||||||
|
])
|
||||||
|
elif ansible_mitogen.utils.ansible_version[:2] <= (2, 16):
|
||||||
|
_CAST_DISPATCH.update({
|
||||||
|
tuple: _cast_to_list,
|
||||||
|
ansible.utils.unsafe_proxy.AnsibleUnsafeBytes: bytes,
|
||||||
|
ansible.utils.unsafe_proxy.AnsibleUnsafeText: mitogen.core.UnicodeType,
|
||||||
|
})
|
||||||
|
_CAST_SUBTYPES.extend([
|
||||||
|
tuple,
|
||||||
|
])
|
||||||
|
else:
|
||||||
|
mitogen_ver = '.'.join(str(v) for v in mitogen.__version__)
|
||||||
|
raise ImportError("Mitogen %s can't cast Ansible %s objects"
|
||||||
|
% (mitogen_ver, ansible.__version__))
|
||||||
|
|
||||||
|
|
||||||
|
def cast(obj):
|
||||||
|
"""
|
||||||
|
Return obj (or a copy) with subtypes of builtins cast to their supertype.
|
||||||
|
|
||||||
|
This is an enhanced version of :func:`mitogen.utils.cast`. In addition it
|
||||||
|
handles ``ansible.utils.unsafe_proxy.AnsibleUnsafeText`` and variants.
|
||||||
|
|
||||||
|
There are types handled by :func:`ansible.utils.unsafe_proxy.wrap_var()`
|
||||||
|
that this function currently does not handle (e.g. `set()`), or does not
preserve (e.g. `tuple()`). Future enhancements may change this.
|
||||||
|
|
||||||
|
:param obj:
|
||||||
|
Object to undecorate.
|
||||||
|
:returns:
|
||||||
|
Undecorated object.
|
||||||
|
"""
|
||||||
|
# Fast path: obj is a known type, dispatch directly
|
||||||
|
try:
|
||||||
|
unwrapper = _CAST_DISPATCH[type(obj)]
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
return unwrapper(obj)
|
||||||
|
|
||||||
|
# Slow path: obj is some unknown subclass
|
||||||
|
for typ_ in _CAST_SUBTYPES:
|
||||||
|
if isinstance(obj, typ_):
|
||||||
|
unwrapper = _CAST_DISPATCH[typ_]
|
||||||
|
return unwrapper(obj)
|
||||||
|
|
||||||
|
return mitogen.utils.cast(obj)
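# Minimal illustrative sketch (hypothetical values; wrap_var() is the Ansible
# helper that produces the AnsibleUnsafe*/tagged wrappers handled above):
#
#     from ansible.utils.unsafe_proxy import wrap_var
#     wrapped = wrap_var({u'host': u'example.com'})
#     cast(wrapped)  # -> plain dict and text types, safe to send over mitogen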
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
<!doctype html>
|
|
||||||
<title>Mitogen for Ansible (Redirect)</title>
|
|
||||||
<script>
|
|
||||||
{% include "piwik-config.js" %}
|
|
||||||
var u="https://networkgenomics.com/p/tr/";
|
|
||||||
_paq.push(['setTrackerUrl', u+'ep']);
|
|
||||||
</script>
|
|
||||||
<script src="https://networkgenomics.com/p/tr/js"></script>
|
|
||||||
<script>
|
|
||||||
setTimeout(function() {
|
|
||||||
window.location = 'https://networkgenomics.com/ansible/';
|
|
||||||
}, 0);
|
|
||||||
</script>
|
|
||||||
<meta http-equiv="Refresh" content="0; url=https://networkgenomics.com/ansible/">
|
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
<p>
|
<p>
|
||||||
<br>
|
<br>
|
||||||
<a class="github-button" href="https://github.com/dw/mitogen/" data-size="large" data-show-count="true" aria-label="Star dw/mitogen on GitHub">Star</a>
|
<a class="github-button" href="https://github.com/mitogen-hq/mitogen/" data-size="large" data-show-count="true" aria-label="Star mitogen on GitHub">Star</a>
|
||||||
</p>
|
</p>
|
||||||
|
|||||||
@@ -1,5 +0,0 @@
|
|||||||
window._paq = [];
|
|
||||||
window._paq.push(['trackPageView']);
|
|
||||||
window._paq.push(['enableLinkTracking']);
|
|
||||||
window._paq.push(['enableHeartBeatTimer', 30]);
|
|
||||||
window._paq.push(['setSiteId', 6]);
|
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
[build.environment]
|
||||||
|
PYTHON_VERSION = "3.8"
|
||||||
@@ -1,3 +1,6 @@
|
|||||||
|
docutils<0.18
|
||||||
|
Jinja2<3
|
||||||
|
MarkupSafe<2.1
|
||||||
Sphinx==2.1.2; python_version > '3.0'
|
Sphinx==2.1.2; python_version > '3.0'
|
||||||
sphinxcontrib-programoutput==0.14; python_version > '3.0'
|
sphinxcontrib-programoutput==0.14; python_version > '3.0'
|
||||||
alabaster==0.7.10; python_version > '3.0'
|
alabaster==0.7.10; python_version > '3.0'
|
||||||
|
|||||||
File diff suppressed because it is too large
@@ -0,0 +1,38 @@
|
|||||||
|
# SPDX-FileCopyrightText: 2025 Mitogen authors <https://github.com/mitogen-hq>
|
||||||
|
# SPDX-License-Identifier: BSD-3-Clause
|
||||||
|
# !mitogen: minify_safe
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 14):
|
||||||
|
from mitogen.imports._py314 import _code_imports
|
||||||
|
elif sys.version_info >= (3, 6):
|
||||||
|
from mitogen.imports._py36 import _code_imports
|
||||||
|
elif sys.version_info >= (2, 5):
|
||||||
|
from mitogen.imports._py2 import _code_imports_py25 as _code_imports
|
||||||
|
else:
|
||||||
|
from mitogen.imports._py2 import _code_imports_py24 as _code_imports
|
||||||
|
|
||||||
|
|
||||||
|
def codeobj_imports(co):
|
||||||
|
"""
|
||||||
|
Yield (level, modname, names) tuples by scanning the code object `co`.
|
||||||
|
|
||||||
|
Top level `import mod` & `from mod import foo` statements are matched.
|
||||||
|
Those inside a `class ...` or `def ...` block are currently skipped.
|
||||||
|
|
||||||
|
>>> co = compile('import a, b; from c import d, e as f', '<str>', 'exec')
|
||||||
|
>>> list(codeobj_imports(co)) # doctest: +ELLIPSIS
|
||||||
|
[(..., 'a', ()), (..., 'b', ()), (..., 'c', ('d', 'e'))]
|
||||||
|
|
||||||
|
:return:
|
||||||
|
Generator producing `(level, modname, names)` tuples, where:
|
||||||
|
|
||||||
|
* `level`:
|
||||||
|
-1 implicit relative (Python 2.x default)
|
||||||
|
0 absolute (Python 3.x, `from __future__ import absolute_import`)
|
||||||
|
>0 explicit relative (`from . import a`, `from ..b import c`)
|
||||||
|
* `modname`: Name of module to import, or to import `names` from.
|
||||||
|
* `names`: tuple of names in `from mod import ..`.
|
||||||
|
"""
|
||||||
|
return _code_imports(co.co_code, co.co_consts, co.co_names)
|
||||||
@@ -0,0 +1,54 @@
|
|||||||
|
# SPDX-FileCopyrightText: 2025 Mitogen authors <https://github.com/mitogen-hq>
|
||||||
|
# SPDX-License-Identifier: BSD-3-Clause
|
||||||
|
# !mitogen: minify_safe
|
||||||
|
|
||||||
|
import array
|
||||||
|
import itertools
|
||||||
|
import opcode
|
||||||
|
|
||||||
|
|
||||||
|
IMPORT_NAME = opcode.opmap['IMPORT_NAME']
|
||||||
|
LOAD_CONST = opcode.opmap['LOAD_CONST']
|
||||||
|
|
||||||
|
|
||||||
|
def _opargs(code, _have_arg=opcode.HAVE_ARGUMENT):
|
||||||
|
it = iter(array.array('B', code))
|
||||||
|
nexti = it.next
|
||||||
|
for i in it:
|
||||||
|
if i >= _have_arg:
|
||||||
|
yield (i, nexti() | (nexti() << 8))
|
||||||
|
else:
|
||||||
|
yield (i, None)
|
||||||
|
|
||||||
|
|
||||||
|
def _code_imports_py25(code, consts, names):
|
||||||
|
it1, it2, it3 = itertools.tee(_opargs(code), 3)
|
||||||
|
try:
|
||||||
|
next(it2)
|
||||||
|
next(it3)
|
||||||
|
next(it3)
|
||||||
|
except StopIteration:
|
||||||
|
return
|
||||||
|
for oparg1, oparg2, (op3, arg3) in itertools.izip(it1, it2, it3):
|
||||||
|
if op3 != IMPORT_NAME:
|
||||||
|
continue
|
||||||
|
op1, arg1 = oparg1
|
||||||
|
op2, arg2 = oparg2
|
||||||
|
if op1 != LOAD_CONST or op2 != LOAD_CONST:
|
||||||
|
continue
|
||||||
|
yield (consts[arg1], names[arg3], consts[arg2] or ())
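# CPython 2.5-2.7 compiles both import forms as the opcode sequence
# LOAD_CONST <level>, LOAD_CONST <fromlist>, IMPORT_NAME <module>; the three
# tee'd iterators above slide a window over the opcode stream to match
# exactly that pattern.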
|
||||||
|
|
||||||
|
|
||||||
|
def _code_imports_py24(code, consts, names):
|
||||||
|
it1, it2 = itertools.tee(_opargs(code), 2)
|
||||||
|
try:
|
||||||
|
next(it2)
|
||||||
|
except StopIteration:
|
||||||
|
return
|
||||||
|
for oparg1, (op2, arg2) in itertools.izip(it1, it2):
|
||||||
|
if op2 != IMPORT_NAME:
|
||||||
|
continue
|
||||||
|
op1, arg1 = oparg1
|
||||||
|
if op1 != LOAD_CONST:
|
||||||
|
continue
|
||||||
|
yield (-1, names[arg2], consts[arg1] or ())
|
||||||
@@ -0,0 +1,26 @@
|
|||||||
|
# SPDX-FileCopyrightText: 2025 Mitogen authors <https://github.com/mitogen-hq>
|
||||||
|
# SPDX-License-Identifier: BSD-3-Clause
|
||||||
|
# !mitogen: minify_safe
|
||||||
|
|
||||||
|
import opcode
|
||||||
|
|
||||||
|
IMPORT_NAME = opcode.opmap['IMPORT_NAME']
|
||||||
|
LOAD_CONST = opcode.opmap['LOAD_CONST']
|
||||||
|
LOAD_SMALL_INT = opcode.opmap['LOAD_SMALL_INT']
|
||||||
|
|
||||||
|
|
||||||
|
def _code_imports(code, consts, names):
|
||||||
|
start = 4
|
||||||
|
while True:
|
||||||
|
op3_idx = code.find(IMPORT_NAME, start, -1)
|
||||||
|
if op3_idx < 0:
|
||||||
|
return
|
||||||
|
if op3_idx % 2:
|
||||||
|
start = op3_idx + 1
|
||||||
|
continue
|
||||||
|
if code[op3_idx-4] != LOAD_SMALL_INT or code[op3_idx-2] != LOAD_CONST:
|
||||||
|
start = op3_idx + 2
|
||||||
|
continue
|
||||||
|
start = op3_idx + 6
|
||||||
|
arg1, arg2, arg3 = code[op3_idx-3], code[op3_idx-1], code[op3_idx+1]
|
||||||
|
yield (arg1, names[arg3], consts[arg2] or ())
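# On Python 3.14 the import level appears to be compiled as LOAD_SMALL_INT,
# whose oparg is the level value itself rather than a const-table index,
# which is why arg1 is yielded directly instead of consts[arg1].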
|
||||||
@@ -0,0 +1,25 @@
|
|||||||
|
# SPDX-FileCopyrightText: 2025 Mitogen authors <https://github.com/mitogen-hq>
|
||||||
|
# SPDX-License-Identifier: BSD-3-Clause
|
||||||
|
# !mitogen: minify_safe
|
||||||
|
|
||||||
|
import opcode
|
||||||
|
|
||||||
|
IMPORT_NAME = opcode.opmap['IMPORT_NAME']
|
||||||
|
LOAD_CONST = opcode.opmap['LOAD_CONST']
|
||||||
|
|
||||||
|
|
||||||
|
def _code_imports(code, consts, names):
|
||||||
|
start = 4
|
||||||
|
while True:
|
||||||
|
op3_idx = code.find(IMPORT_NAME, start, -1)
|
||||||
|
if op3_idx < 0:
|
||||||
|
return
|
||||||
|
if op3_idx % 2:
|
||||||
|
start = op3_idx + 1
|
||||||
|
continue
|
||||||
|
if code[op3_idx-4] != LOAD_CONST or code[op3_idx-2] != LOAD_CONST:
|
||||||
|
start = op3_idx + 2
|
||||||
|
continue
|
||||||
|
start = op3_idx + 6
|
||||||
|
arg1, arg2, arg3 = code[op3_idx-3], code[op3_idx-1], code[op3_idx+1]
|
||||||
|
yield (consts[arg1], names[arg3], consts[arg2] or ())
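# Since Python 3.6 bytecode is "wordcode": each instruction is two bytes, so
# a genuine IMPORT_NAME sits at an even offset with its argument at
# op3_idx + 1, preceded by LOAD_CONST <level> at op3_idx - 4 and
# LOAD_CONST <fromlist> at op3_idx - 2. Odd-offset matches are argument
# bytes that merely equal the opcode value and are skipped.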
|
||||||
Some files were not shown because too many files have changed in this diff