chore: Add router-vs-billing user audit script, configs, and generated audit report.
This commit is contained in:
610
audit_report.txt
Normal file
610
audit_report.txt
Normal file
@@ -0,0 +1,610 @@
|
||||
--- Loading Billing Data ---
|
||||
Loaded 1151 users from Billing.
|
||||
|
||||
--- Auditing router-dimensi-dell (103.138.63.178) ---
|
||||
Connecting to 103.138.63.178...
|
||||
Found 1450 PPP secrets on router.
|
||||
⚠️ FOUND 451 UNREGISTERED USERS (Exist on Router but NOT in Billing):
|
||||
- 101100057014_dudiasaduddin (Profile: star_100)
|
||||
- 1800006 (Profile: EXPIRED)
|
||||
- 2000035 (Profile: EXPIRED)
|
||||
- 2000037 (Profile: default)
|
||||
- 2000134 (Profile: EXPIRED)
|
||||
- 2000135 (Profile: EXPIRED)
|
||||
- 2000136 (Profile: EXPIRED)
|
||||
- 2000157 (Profile: EXPIRED)
|
||||
- 220404165721 (Profile: star_10)
|
||||
- 220404165722 (Profile: star_10)
|
||||
- 220404165723 (Profile: star_10)
|
||||
- 220430172116 (Profile: star_20)
|
||||
- 220612165038 (Profile: EXPIRED)
|
||||
- 220728201823 (Profile: EXPIRED)
|
||||
- 220728201827 (Profile: gold_10)
|
||||
- 220728201836 (Profile: EXPIRED)
|
||||
- 220728201837 (Profile: EXPIRED)
|
||||
- 221001182831 (Profile: EXPIRED)
|
||||
- 221128130251 (Profile: EXPIRED)
|
||||
- 221128130276 (Profile: EXPIRED)
|
||||
- 221128130277 (Profile: EXPIRED)
|
||||
- 221128130291 (Profile: EXPIRED)
|
||||
- 230220191151 (Profile: EXPIRED)
|
||||
- 230220191157 (Profile: EXPIRED)
|
||||
- 230308162057 (Profile: EXPIRED)
|
||||
- 600002 (Profile: star_10)
|
||||
- Test2 (Profile: EXPIRED)
|
||||
- abingglp (Profile: EXPIRED)
|
||||
- adiokta (Profile: star_10)
|
||||
- agusbudikbl (Profile: star_10)
|
||||
- agusgm@dms.net (Profile: star_30)
|
||||
- agusnovaglp@dms.net (Profile: star_10)
|
||||
- aguspurnamadlp (Profile: star_10)
|
||||
- akang@dms.net (Profile: bali_10)
|
||||
- alifpnd (Profile: EXPIRED)
|
||||
- ambaraglp (Profile: star_10)
|
||||
- anbksmkn3 (Profile: star_200)
|
||||
- ancigpnd@dms.net (Profile: star_10)
|
||||
- andika (Profile: bali_10)
|
||||
- andriani (Profile: gold_50)
|
||||
- apeldlt (Profile: star_30)
|
||||
- arbatech (Profile: bali_10)
|
||||
- ardanaglp (Profile: bali_10)
|
||||
- ardibiu (Profile: star_10)
|
||||
- arikdlt (Profile: star_20)
|
||||
- arnataglp (Profile: bali_10)
|
||||
- asacemenggon (Profile: star_20)
|
||||
- atenk (Profile: bali_10)
|
||||
- awd (Profile: star_10)
|
||||
- bagasdlp (Profile: bali_10)
|
||||
- baglugbiu (Profile: bali_10)
|
||||
- baharidlp (Profile: star_10)
|
||||
- bajink (Profile: bali_10)
|
||||
- balikreketglp (Profile: star_30)
|
||||
- bantas@dms.net (Profile: bali_10)
|
||||
- bazar (Profile: star_50)
|
||||
- bellys@dms.net (Profile: bali_10)
|
||||
- benikbiu (Profile: bali_10)
|
||||
- betok (Profile: star_10)
|
||||
- binbinbbk@dms.net (Profile: EXPIRED)
|
||||
- bintangglp@dms.net (Profile: bali_10)
|
||||
- biu (Profile: star_20)
|
||||
- bms (Profile: star_200)
|
||||
- brdlodtangluk (Profile: star_30)
|
||||
- brdlp (Profile: bale_banjar)
|
||||
- brgelulung (Profile: bali_20)
|
||||
- brglp@dms.net (Profile: bali_50)
|
||||
- brpekuwudan (Profile: bale_banjar)
|
||||
- brpinda (Profile: bale_banjar)
|
||||
- brtlb (Profile: star_20)
|
||||
- bukanikbtn@dms.net (Profile: bali_20)
|
||||
- bukidatlb (Profile: star_10)
|
||||
- bulis@dms.net (Profile: hemat)
|
||||
- bulustlb (Profile: star_10)
|
||||
- bunitaglp (Profile: EXPIRED)
|
||||
- butuhtbn@dms.net (Profile: bali_10)
|
||||
- cakratlb (Profile: star_10)
|
||||
- candysalon (Profile: star_20)
|
||||
- capunkglp (Profile: star_10)
|
||||
- caraka@dms.net (Profile: star_10)
|
||||
- cctv@dms.net (Profile: EXPIRED)
|
||||
- cctvtelabah (Profile: gold_20)
|
||||
- cdataepon (Profile: star_100)
|
||||
- cdatagpon (Profile: star_100)
|
||||
- ceraki@dms.net (Profile: bali_20)
|
||||
- china (Profile: lb_50)
|
||||
- crazy (Profile: bali_10)
|
||||
- dadap@dms.net (Profile: bali_10)
|
||||
- danisglp@dms.net (Profile: bali_10)
|
||||
- darmapnd (Profile: star_10)
|
||||
- darmika (Profile: bali_10)
|
||||
- dayuwikaglp (Profile: star_30)
|
||||
- dedesound (Profile: star_20)
|
||||
- dedokdlp@dms.net (Profile: bali_10)
|
||||
- dekamaglp (Profile: bali_10)
|
||||
- dekaryaplk@dms.net (Profile: bali_10)
|
||||
- dekbongolpnd (Profile: hemat)
|
||||
- dekcungvilla@dms.net (Profile: EXPIRED)
|
||||
- dekdang@dms.net (Profile: bali_10)
|
||||
- dekkhantreng@dms.net (Profile: EXPIRED)
|
||||
- dekkungtlb (Profile: star_10)
|
||||
- deklittlb@dms.net (Profile: EXPIRED)
|
||||
- deknyong@dms.net (Profile: bali_10)
|
||||
- dekpit@dms.net (Profile: EXPIRED)
|
||||
- dektengkbl (Profile: star_10)
|
||||
- denikpnd@dms.net (Profile: bali_10)
|
||||
- desawisatasukawati (Profile: bali_50)
|
||||
- devaglp (Profile: star_20)
|
||||
- devibdil (Profile: lb_10)
|
||||
- dewaastanaplk (Profile: bali_10)
|
||||
- dewadlt@dms.net (Profile: bali_10)
|
||||
- dewarakagrogak (Profile: star_20)
|
||||
- dewatut (Profile: bali_10)
|
||||
- diarmandlp (Profile: star_10)
|
||||
- dimensi (Profile: bali_20)
|
||||
- dinamo (Profile: bali_10)
|
||||
- dipaglp (Profile: star_20)
|
||||
- dmslive (Profile: star_150)
|
||||
- doglesplk@dms.net (Profile: star_10)
|
||||
- durus@dms.net (Profile: bali_10)
|
||||
- duryaglp@dms.net (Profile: star_10)
|
||||
- dwayubbn (Profile: star_20)
|
||||
- dwcahyanigrokgak (Profile: star_10)
|
||||
- dwipayanabiu (Profile: bali_10)
|
||||
- ediputraglp (Profile: star_10)
|
||||
- edo (Profile: free1)
|
||||
- ega (Profile: gold_20)
|
||||
- ega2 (Profile: bali_10)
|
||||
- ekabubun (Profile: bali_10)
|
||||
- ekaputrapnd@dms.net (Profile: star_20)
|
||||
- ekayenikdlp (Profile: bali_10)
|
||||
- elangglp (Profile: star_10)
|
||||
- endopurnama (Profile: star_20)
|
||||
- esaplk (Profile: EXPIRED)
|
||||
- esterplk (Profile: star_20)
|
||||
- fuller-duma (Profile: star_100)
|
||||
- fuller2 (Profile: star_150)
|
||||
- gajahglp (Profile: star_10)
|
||||
- galuhplk (Profile: star_10)
|
||||
- gap (Profile: bali_10)
|
||||
- gedearibtnglp (Profile: star_10)
|
||||
- genta (Profile: star_20)
|
||||
- gilinkglp (Profile: star_10)
|
||||
- gilinkglp@dms.net (Profile: bali_10)
|
||||
- gpon (Profile: gold_10)
|
||||
- gryakebon (Profile: bali_20)
|
||||
- grykarangmas (Profile: star_10)
|
||||
- gstpartaglp (Profile: star_10)
|
||||
- gudigglp (Profile: star_10)
|
||||
- gungdeskwti (Profile: star_30)
|
||||
- gusajidwijanatlb (Profile: bali_20)
|
||||
- gusajiputra (Profile: star_20)
|
||||
- gusbaskara (Profile: hemat)
|
||||
- gusdekawaglp2@dms.net (Profile: bali_10)
|
||||
- guskoyiktlb (Profile: star_10)
|
||||
- gusmanadyanta@dms.net (Profile: star_20)
|
||||
- gusmanrai@dms.net (Profile: bali_20)
|
||||
- gussantikaglp (Profile: star_30)
|
||||
- gussasglp (Profile: star_20)
|
||||
- gussucikatlb@dms.net (Profile: star_10)
|
||||
- gussulasi (Profile: hemat)
|
||||
- gussupartika (Profile: star_20)
|
||||
- gussuryatlb (Profile: star_10)
|
||||
- gustuanomtlb (Profile: star_10)
|
||||
- gustut (Profile: bali_10)
|
||||
- gusyusglp@dms.net (Profile: EXPIRED)
|
||||
- hendrabiu (Profile: gold_10)
|
||||
- hendrakbl (Profile: star_10)
|
||||
- hsgq (Profile: star_200)
|
||||
- huawei2 (Profile: star_20)
|
||||
- huawei3 (Profile: star_20)
|
||||
- iasantiniglp@dms.net (Profile: star_10)
|
||||
- ibadyatmaja (Profile: star_20)
|
||||
- ibukceluk@dms.net (Profile: bali_10)
|
||||
- indahpratiwipnd (Profile: star_10)
|
||||
- irmaglp@dms.net (Profile: bali_10)
|
||||
- januadipnd (Profile: bali_10)
|
||||
- jayen@dms.net (Profile: bali_10)
|
||||
- jering@dms.net (Profile: bali_10)
|
||||
- jikbatuh@dms.net (Profile: bali_20)
|
||||
- jrokarin (Profile: star_20)
|
||||
- jrosudita@dms.net (Profile: bali_10)
|
||||
- kaderpnd (Profile: bali_10)
|
||||
- kadusglp (Profile: bali_10)
|
||||
- kalpagudang (Profile: bali_10)
|
||||
- kalpawarung (Profile: star_20)
|
||||
- kanpar (Profile: bali_20)
|
||||
- kardana (Profile: gold_10)
|
||||
- karglp (Profile: star_10)
|
||||
- karianaglp (Profile: star_10)
|
||||
- karibtn (Profile: star_10)
|
||||
- kdaldidlp (Profile: bali_10)
|
||||
- kdcahyanigll (Profile: star_20)
|
||||
- kdmuliastraglp (Profile: bali_10)
|
||||
- kelokplk (Profile: star_20)
|
||||
- kembanggirang@dms.net (Profile: EXPIRED)
|
||||
- kembarglp (Profile: star_10)
|
||||
- kenanfree (Profile: free1)
|
||||
- keniten@dms.net (Profile: star_30)
|
||||
- keri@dms.net (Profile: star_10)
|
||||
- ketutdarsa@dms.net (Profile: star_10)
|
||||
- ketutsedana@dms.net (Profile: star_20)
|
||||
- kmarimuliawantlb (Profile: star_10)
|
||||
- kmgdeglp (Profile: bali_10)
|
||||
- kmlasbtnbnd (Profile: star_20)
|
||||
- kmmantepbnd (Profile: hemat)
|
||||
- kmngsuparta@dms.net (Profile: bali_10)
|
||||
- kmsrinadidlp (Profile: star_10)
|
||||
- koliglp@dms.net (Profile: bali_10)
|
||||
- komangratih@dms.net (Profile: star_10)
|
||||
- komeng (Profile: star_10)
|
||||
- korwilskwt (Profile: star_10)
|
||||
- kost2tuadhi@kebalian (Profile: star_20)
|
||||
- krishnatlb@dms.net (Profile: star_10)
|
||||
- ksppermata (Profile: star_30)
|
||||
- ksu-peninjoan (Profile: star_30)
|
||||
- ksuglp (Profile: bali_10)
|
||||
- ktmutikaglp@dms.net (Profile: bali_10)
|
||||
- kubukayana (Profile: EXPIRED)
|
||||
- kumaralilawati (Profile: star_20)
|
||||
- kumpul (Profile: star_10)
|
||||
- kuncungpnd (Profile: hemat)
|
||||
- kunyukglp@dms.net (Profile: bali_10)
|
||||
- kuwinktlb (Profile: star_20)
|
||||
- laksanatlb (Profile: star_20)
|
||||
- lazan@dms.net (Profile: star_20)
|
||||
- lelutplk (Profile: star_10)
|
||||
- lengotdlp (Profile: bali_20)
|
||||
- liongbkl@dms.net (Profile: bali_10)
|
||||
- liongdlp (Profile: bali_10)
|
||||
- lionkglp (Profile: star_20)
|
||||
- loletbiu (Profile: star_10)
|
||||
- lpd@pinda (Profile: star_20)
|
||||
- lpdbnd (Profile: bale_banjar)
|
||||
- lpdsukawati (Profile: star_100)
|
||||
- luhanaglp@dms.net (Profile: bali_10)
|
||||
- lupuspnd (Profile: star_20)
|
||||
- made (Profile: star_10)
|
||||
- madebakat@dms.net (Profile: bali_10)
|
||||
- mandoro (Profile: gold_10)
|
||||
- mangatikplk@dms.net (Profile: bali_10)
|
||||
- mangbayu@dms.net (Profile: bali_10)
|
||||
- mangbracukglp (Profile: EXPIRED)
|
||||
- mangcuk@dms.net (Profile: EXPIRED)
|
||||
- mangcukglp@dms.net (Profile: bali_10)
|
||||
- manggulik@dms.net (Profile: EXPIRED)
|
||||
- mangnikpkwd (Profile: star_10)
|
||||
- manlet@dms.net (Profile: bali_10)
|
||||
- mannettlb@dms.net (Profile: star_20)
|
||||
- mardawaglp (Profile: star_10)
|
||||
- markunceluk (Profile: lb_10)
|
||||
- mayundlp@dms.net (Profile: bali_10)
|
||||
- mdbagiartapkwd (Profile: star_10)
|
||||
- mdgriadlp (Profile: star_10)
|
||||
- mdsangutbnd (Profile: star_10)
|
||||
- mdtresnakbl@dms.net (Profile: bali_10)
|
||||
- mdwidastrasanga (Profile: star_10)
|
||||
- meranakbl (Profile: star_10)
|
||||
- mira (Profile: star_30)
|
||||
- mkbagiastraglp@dms.net (Profile: bali_10)
|
||||
- mkbije-free-mawang (Profile: star_50)
|
||||
- mkmerta@dms.net (Profile: star_20)
|
||||
- mksanggra@dms.net (Profile: bali_20)
|
||||
- mokbalikmecutan (Profile: bali_10)
|
||||
- molenglp (Profile: star_10)
|
||||
- mologkos@sanga (Profile: star_100)
|
||||
- moyoglp@dms.net (Profile: star_20)
|
||||
- mundrapnd@dms.net (Profile: star_10)
|
||||
- murjapnd (Profile: bali_10)
|
||||
- murjaya (Profile: bali_10)
|
||||
- musahendrianbtn (Profile: EXPIRED)
|
||||
- mustiari-warung-bonbiu (Profile: hemat)
|
||||
- narkaglp (Profile: EXPIRED)
|
||||
- ngurahokabiu (Profile: bali_10)
|
||||
- ngurahokabiu@dms.net (Profile: bali_10)
|
||||
- nogita-koroh-sakah (Profile: EXPIRED)
|
||||
- nurananyoktlb (Profile: star_10)
|
||||
- nuranikglp (Profile: bali_10)
|
||||
- nuriantoglp@dms.net (Profile: bali_10)
|
||||
- nvr (Profile: star_20)
|
||||
- nyangkring (Profile: bali_10)
|
||||
- nymsukrawanglp (Profile: star_20)
|
||||
- nyomanmuliartabiu@dms.net (Profile: hemat)
|
||||
- ogik@dms.net (Profile: star_10)
|
||||
- okikglp (Profile: bali_10)
|
||||
- openglp (Profile: bali_10)
|
||||
- padmabali (Profile: star_30)
|
||||
- pakbudi3 (Profile: star_50)
|
||||
- pakgedeeka (Profile: star_30)
|
||||
- pakirglp@dms.net (Profile: bali_10)
|
||||
- pakjendradlp (Profile: star_10)
|
||||
- pakkiuttlb@dms.net (Profile: bali_10)
|
||||
- pakkurglp@dms.net (Profile: star_20)
|
||||
- pakmandya@dms.net (Profile: star_10)
|
||||
- pakmetabtn (Profile: EXPIRED)
|
||||
- pakrinaglp@dms.net (Profile: bali_10)
|
||||
- pakslametmecutan (Profile: bali_20)
|
||||
- paktapamecutan (Profile: star_10)
|
||||
- pakteja (Profile: bali_20)
|
||||
- pakwayah (Profile: gold_10)
|
||||
- pakyanpejeng (Profile: star_20)
|
||||
- panderestudlp (Profile: star_20)
|
||||
- pangalihgll (Profile: bali_20)
|
||||
- panterglp (Profile: star_10)
|
||||
- pantomin (Profile: bali_10)
|
||||
- paramarthaglp@dms.net (Profile: star_20)
|
||||
- pelaspnd@dms.net (Profile: EXPIRED)
|
||||
- percobaanbnd@dms.net (Profile: hemat)
|
||||
- petruktbn (Profile: star_10)
|
||||
- pkbalikspd (Profile: free1)
|
||||
- ponixglp (Profile: star_10)
|
||||
- pranata-karang-bonbiu (Profile: star_20)
|
||||
- prayoga (Profile: gold_10)
|
||||
- ptsumaryantopkwd (Profile: star_10)
|
||||
- puradesa@banda (Profile: hemat)
|
||||
- purapandedlp (Profile: star_20)
|
||||
- purauluncariksanga (Profile: bale_banjar)
|
||||
- purnayasa@dms.net (Profile: bali_10)
|
||||
- purwati@ppurnama (Profile: bali_10)
|
||||
- puspaaman (Profile: bali_20)
|
||||
- puspayudadlp (Profile: bali_10)
|
||||
- putraadnyanadlp (Profile: gold_10)
|
||||
- putraaluminium (Profile: star_20)
|
||||
- putrawaringin (Profile: EXPIRED)
|
||||
- putuadhi@dms.net (Profile: star_10)
|
||||
- putuadhibbk2 (Profile: gold_10)
|
||||
- putuadhisakura (Profile: gold_10)
|
||||
- putuarix (Profile: bali_10)
|
||||
- putumahendra2 (Profile: star_20)
|
||||
- putumahendraglp@dms.net (Profile: bali_10)
|
||||
- rahbegok (Profile: star_20)
|
||||
- raiglp (Profile: star_10)
|
||||
- raras (Profile: star_20)
|
||||
- rastapnd@dms.net (Profile: bali_10)
|
||||
- rb750 (Profile: star_100)
|
||||
- renahome (Profile: bali_10)
|
||||
- renaskubu2 (Profile: star_50)
|
||||
- reniawatipnd (Profile: star_10)
|
||||
- rianpnd@dms.net (Profile: star_10)
|
||||
- richapnd (Profile: hemat)
|
||||
- robot (Profile: bali_50)
|
||||
- rosiantotlb (Profile: gold_10)
|
||||
- ruditatlb (Profile: star_10)
|
||||
- rugihpnd@dms.net (Profile: hemat)
|
||||
- sadarpnd@dms.net (Profile: bali_10)
|
||||
- salonlaksmi (Profile: star_20)
|
||||
- sambukglp (Profile: gold_10)
|
||||
- sanjayakbl (Profile: star_30)
|
||||
- santikaglp (Profile: star_20)
|
||||
- saris@dms.net (Profile: star_10)
|
||||
- sarwagatah (Profile: star_50)
|
||||
- sdn3 (Profile: star_100)
|
||||
- sedanayoga (Profile: bali_10)
|
||||
- semadiasaglp (Profile: star_20)
|
||||
- seni (Profile: star_20)
|
||||
- server (Profile: star_10)
|
||||
- sinsinbatuan (Profile: bali_10)
|
||||
- sinsindlp (Profile: bali_20)
|
||||
- sman1sukawati (Profile: bali_150)
|
||||
- smartmedia (Profile: star_20)
|
||||
- smc (Profile: star_500)
|
||||
- smctest (Profile: star_50)
|
||||
- smkn3sukawati (Profile: star_500)
|
||||
- sotongbnd (Profile: star_20)
|
||||
- srisedana2 (Profile: star_20)
|
||||
- storing@dms.net (Profile: star_20)
|
||||
- suaja (Profile: bali_10)
|
||||
- suardanadlp (Profile: star_10)
|
||||
- suardanadlp@dms.net (Profile: bali_10)
|
||||
- suartejapnd@dms.net (Profile: bali_10)
|
||||
- sudadlp (Profile: bali_10)
|
||||
- sudanapnd@dms.net (Profile: star_10)
|
||||
- sudantapnd (Profile: lb_10)
|
||||
- sudarsana2 (Profile: gold_10)
|
||||
- sudarsanadlt@dms.net (Profile: bali_10)
|
||||
- sudawadlp (Profile: star_10)
|
||||
- sudiarsasaingkbl (Profile: EXPIRED)
|
||||
- sudiartakbl (Profile: bali_10)
|
||||
- sudibyapnd (Profile: lb_20)
|
||||
- sudirmantlb (Profile: star_10)
|
||||
- sujaglp@dms.net (Profile: bali_10)
|
||||
- sukarma (Profile: free1)
|
||||
- sukarmaplkfree (Profile: hemat)
|
||||
- sukaryaplk (Profile: star_20)
|
||||
- sukawanbbk (Profile: star_20)
|
||||
- sukerta@dms.net (Profile: star_10)
|
||||
- sukertapnd@dms.net (Profile: star_20)
|
||||
- sukmadewaglp (Profile: bali_10)
|
||||
- sukmajaya (Profile: star_10)
|
||||
- sukmajaya2 (Profile: star_20)
|
||||
- sulasdlp@dms.net (Profile: star_20)
|
||||
- sunarsapnd@dms.net (Profile: bali_10)
|
||||
- sunartidlp (Profile: star_10)
|
||||
- sundentlb (Profile: star_10)
|
||||
- suratakbl@dms.net (Profile: bali_10)
|
||||
- suryapnd@dms.net (Profile: hemat)
|
||||
- suta@dms.net (Profile: star_10)
|
||||
- sutamakbl@dms.net (Profile: bali_10)
|
||||
- suwandikatlb@dms.net (Profile: EXPIRED)
|
||||
- tabig (Profile: bali_20)
|
||||
- tahtaglp (Profile: star_10)
|
||||
- tanpa-vlan (Profile: star_50)
|
||||
- tarkapinda (Profile: bali_10)
|
||||
- test (Profile: star_50)
|
||||
- testhsgq (Profile: bali_20)
|
||||
- tikdlp (Profile: star_10)
|
||||
- tinkglp (Profile: bali_10)
|
||||
- tisentlb (Profile: hemat)
|
||||
- tomblosglp (Profile: star_10)
|
||||
- tomiglp@dms.net (Profile: hemat)
|
||||
- tudedlp (Profile: bali_10)
|
||||
- tusuar@dms.net (Profile: bali_10)
|
||||
- tutbar@dms.net (Profile: EXPIRED)
|
||||
- tutbuhglp@dms.net (Profile: star_10)
|
||||
- tutjaglp (Profile: bali_10)
|
||||
- tutnix (Profile: bali_20)
|
||||
- udimecutan (Profile: bali_10)
|
||||
- ulambanten (Profile: star_20)
|
||||
- vega (Profile: star_50)
|
||||
- viana (Profile: star_50)
|
||||
- wahyuglp (Profile: bali_10)
|
||||
- wahyupkwd (Profile: star_20)
|
||||
- wajibglp (Profile: star_10)
|
||||
- wajibpnd (Profile: star_10)
|
||||
- warplk@dms.net (Profile: bali_10)
|
||||
- warungabyan (Profile: hemat)
|
||||
- wawanglp (Profile: star_10)
|
||||
- widhati (Profile: gold_50)
|
||||
- widiastradlp@dms.net (Profile: bali_10)
|
||||
- widiastratlb@dms.net (Profile: lb_10)
|
||||
- wiguna (Profile: star_20)
|
||||
- win10 (Profile: star_20)
|
||||
- wira@dms.net (Profile: bali_10)
|
||||
- wiskbl (Profile: star_10)
|
||||
- wizglp (Profile: bali_10)
|
||||
- wrbagas (Profile: star_10)
|
||||
- wyrukapurnama (Profile: star_30)
|
||||
- wysutakbl (Profile: star_10)
|
||||
- xpon (Profile: star_100)
|
||||
- yanbug@dms.net (Profile: star_10)
|
||||
- yancandraglp (Profile: gold_10)
|
||||
- yandiglp@dms.net (Profile: bali_10)
|
||||
- yanjawa@dms.net (Profile: star_10)
|
||||
- yanraka@dms.net (Profile: EXPIRED)
|
||||
- yantih (Profile: star_20)
|
||||
- yogaprasetya@dms.net (Profile: EXPIRED)
|
||||
- yogatrijataglp@dms.net (Profile: bali_10)
|
||||
- yogik (Profile: star_100)
|
||||
- youngkypnd@dms.net (Profile: bali_10)
|
||||
- yudapustaka (Profile: EXPIRED)
|
||||
- yuliaripnd (Profile: star_20)
|
||||
|
||||
--- Auditing ccr1036 (103.138.63.184) ---
|
||||
Connecting to 103.138.63.184...
|
||||
Found 300 PPP secrets on router.
|
||||
⚠️ FOUND 147 UNREGISTERED USERS (Exist on Router but NOT in Billing):
|
||||
- 220430172111 (Profile: star_20)
|
||||
- abingglp (Profile: star_20)
|
||||
- agusgm@dms.net (Profile: star_30)
|
||||
- akang@dms.net (Profile: star_10)
|
||||
- ancigpnd@dms.net (Profile: star_10)
|
||||
- andika (Profile: star_10)
|
||||
- andriani (Profile: star_50)
|
||||
- arbatech (Profile: star_10)
|
||||
- asacemenggon (Profile: star_20)
|
||||
- astika-glp (Profile: star_20)
|
||||
- atenk (Profile: star_10)
|
||||
- balikreketglp (Profile: star_10)
|
||||
- bellys@dms.net (Profile: star_10)
|
||||
- bintangglp@dms.net (Profile: star_10)
|
||||
- brpinda (Profile: bale_banjar)
|
||||
- brtlb (Profile: star_20)
|
||||
- bukanikbtn@dms.net (Profile: star_20)
|
||||
- bukidatlb (Profile: star_10)
|
||||
- bulis@dms.net (Profile: hemat)
|
||||
- bulustlb (Profile: star_10)
|
||||
- bupda-sukawati (Profile: star_20)
|
||||
- cakratlb (Profile: star_10)
|
||||
- caraka@dms.net (Profile: star_20)
|
||||
- chandra-adnyana-glp (Profile: hemat)
|
||||
- crazy (Profile: star_10)
|
||||
- dadap@dms.net (Profile: star_10)
|
||||
- darmapnd (Profile: star_10)
|
||||
- darmika (Profile: star_10)
|
||||
- dekbongolpnd (Profile: hemat)
|
||||
- dekdang@dms.net (Profile: star_10)
|
||||
- dekkungtlb (Profile: star_10)
|
||||
- denikpnd@dms.net (Profile: star_10)
|
||||
- dewadlt@dms.net (Profile: star_10)
|
||||
- dewatut (Profile: star_10)
|
||||
- dextra-free-mawangkelod-888 (Profile: star_20)
|
||||
- dinamo (Profile: star_10)
|
||||
- dipaglp (Profile: star_20)
|
||||
- doglesplk@dms.net (Profile: star_10)
|
||||
- edo (Profile: free1)
|
||||
- ekaputrapnd@dms.net (Profile: star_20)
|
||||
- gajahglp (Profile: star_10)
|
||||
- gedearibtnglp (Profile: star_10)
|
||||
- grykarangmas (Profile: star_10)
|
||||
- gudigglp (Profile: star_10)
|
||||
- gungdeskwti (Profile: star_30)
|
||||
- gusajiputra (Profile: star_20)
|
||||
- guskoyiktlb (Profile: star_10)
|
||||
- gusmanadyanta@dms.net (Profile: star_20)
|
||||
- gusmanrai@dms.net (Profile: star_20)
|
||||
- iasantiniglp@dms.net (Profile: star_10)
|
||||
- indahpratiwipnd (Profile: star_10)
|
||||
- irmaglp@dms.net (Profile: star_10)
|
||||
- januadipnd (Profile: star_10)
|
||||
- jayen@dms.net (Profile: star_10)
|
||||
- jikbatuh@dms.net (Profile: star_20)
|
||||
- jrosudita@dms.net (Profile: star_10)
|
||||
- kaderpnd (Profile: star_20)
|
||||
- kanpar (Profile: star_20)
|
||||
- karibtn (Profile: star_10)
|
||||
- kembarglp (Profile: star_10)
|
||||
- keniten@dms.net (Profile: star_10)
|
||||
- keri@dms.net (Profile: star_10)
|
||||
- ketutsedana@dms.net (Profile: star_20)
|
||||
- kmgdeglp (Profile: star_10)
|
||||
- kmngsuparta@dms.net (Profile: star_10)
|
||||
- komeng (Profile: star_10)
|
||||
- krishnatlb@dms.net (Profile: star_10)
|
||||
- ksuglp (Profile: star_10)
|
||||
- ktmutikaglp@dms.net (Profile: star_10)
|
||||
- kuncungpnd (Profile: hemat)
|
||||
- kunyukglp@dms.net (Profile: star_10)
|
||||
- lpd@pinda (Profile: star_20)
|
||||
- lupuspnd (Profile: star_20)
|
||||
- madebakat@dms.net (Profile: star_10)
|
||||
- mangbayu@dms.net (Profile: star_10)
|
||||
- mangcukglp@dms.net (Profile: star_10)
|
||||
- mannettlb@dms.net (Profile: star_20)
|
||||
- mdtresnakbl@dms.net (Profile: star_20)
|
||||
- mira (Profile: star_30)
|
||||
- mkbagiastraglp@dms.net (Profile: star_10)
|
||||
- mksanggra@dms.net (Profile: star_20)
|
||||
- moyoglp@dms.net (Profile: star_20)
|
||||
- mundrapnd@dms.net (Profile: star_10)
|
||||
- murjapnd (Profile: star_10)
|
||||
- murjaya (Profile: star_10)
|
||||
- openglp (Profile: star_10)
|
||||
- padmabali (Profile: star_30)
|
||||
- pakirglp@dms.net (Profile: star_10)
|
||||
- pakkiuttlb@dms.net (Profile: star_10)
|
||||
- panterglp (Profile: star_10)
|
||||
- pantomin (Profile: star_10)
|
||||
- paramarthaglp@dms.net (Profile: star_20)
|
||||
- pelaspnd@dms.net (Profile: star_20)
|
||||
- prayoga (Profile: star_10)
|
||||
- putraaluminium (Profile: star_20)
|
||||
- putuadhi@dms.net (Profile: star_10)
|
||||
- putuadhibbk2 (Profile: star_10)
|
||||
- putuarix (Profile: star_10)
|
||||
- putumahendraglp@dms.net (Profile: star_10)
|
||||
- raiglp (Profile: star_10)
|
||||
- rastapnd@dms.net (Profile: star_10)
|
||||
- reniawatipnd (Profile: star_10)
|
||||
- rianpnd@dms.net (Profile: star_10)
|
||||
- richapnd (Profile: hemat)
|
||||
- rikiglp@dms.net (Profile: star_10)
|
||||
- rosiantotlb (Profile: star_10)
|
||||
- rugihpnd@dms.net (Profile: hemat)
|
||||
- rustawan-gll (Profile: star_20)
|
||||
- sadarpnd@dms.net (Profile: star_10)
|
||||
- salonlaksmi (Profile: star_30)
|
||||
- sambukglp (Profile: star_10)
|
||||
- saris@dms.net (Profile: star_10)
|
||||
- semadiasaglp (Profile: star_20)
|
||||
- smctest (Profile: star_20)
|
||||
- storing@dms.net (Profile: star_20)
|
||||
- suartejapnd@dms.net (Profile: star_10)
|
||||
- sudanapnd@dms.net (Profile: star_10)
|
||||
- sudantapnd (Profile: star_10)
|
||||
- sudarsanadlt@dms.net (Profile: star_10)
|
||||
- sudibyapnd (Profile: star_20)
|
||||
- sudirmantlb (Profile: star_10)
|
||||
- sukerta@dms.net (Profile: star_10)
|
||||
- sukertapnd@dms.net (Profile: star_20)
|
||||
- sunarsapnd@dms.net (Profile: star_10)
|
||||
- sundentlb (Profile: star_10)
|
||||
- suryapnd@dms.net (Profile: hemat)
|
||||
- tahtaglp (Profile: star_10)
|
||||
- tarkapinda (Profile: star_10)
|
||||
- tisentlb (Profile: hemat)
|
||||
- tusuar@dms.net (Profile: star_10)
|
||||
- tutbar@dms.net (Profile: star_20)
|
||||
- tutjaglp (Profile: star_10)
|
||||
- tutnix (Profile: star_20)
|
||||
- udimecutan (Profile: star_10)
|
||||
- viana (Profile: star_50)
|
||||
- wajibpnd (Profile: star_10)
|
||||
- warplk@dms.net (Profile: EXPIRED)
|
||||
- widhati (Profile: star_50)
|
||||
- widiastratlb@dms.net (Profile: star_10)
|
||||
- wira@dms.net (Profile: star_10)
|
||||
- wirayasa-glp (Profile: hemat)
|
||||
- yancandraglp (Profile: star_10)
|
||||
- yandiglp@dms.net (Profile: star_10)
|
||||
- yogatrijataglp@dms.net (Profile: star_10)
|
||||
- youngkypnd@dms.net (Profile: star_10)
|
||||
- yuda-hendrawan-banda (Profile: hemat)
|
||||
- yuliaripnd (Profile: star_20)
|
||||
92
audit_users.py
Normal file
92
audit_users.py
Normal file
@@ -0,0 +1,92 @@
|
||||
#!/usr/bin/env python3
|
||||
import sys
|
||||
import os
|
||||
import json
|
||||
import requests
|
||||
from requests.auth import HTTPBasicAuth
|
||||
|
||||
# Add src to path to import BillingDatabase
|
||||
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
||||
from src.billing import BillingDatabase
|
||||
|
||||
# Connection helper
|
||||
def fetch_router_secrets(host, port, user, password):
    """Fetch the list of PPP secrets from a MikroTik router via its REST API.

    Args:
        host: Router IP address or hostname.
        port: HTTP port the REST API listens on.
        user: API username.
        password: API password.

    Returns:
        A list of secret dicts (possibly empty). On any connection, HTTP,
        or JSON-decode error a message is printed and [] is returned, so
        callers can treat the router as having no secrets (best-effort audit).
    """
    url = f"http://{host}:{port}/rest/ppp/secret"
    try:
        print(f"Connecting to {host}...")
        resp = requests.get(url, auth=HTTPBasicAuth(user, password), timeout=10)
        resp.raise_for_status()
        data = resp.json()
        # The API normally returns a JSON array; normalize a lone object
        # into a one-element list so callers can always iterate.
        if isinstance(data, list):
            return data
        return [data] if data else []
    except (requests.RequestException, ValueError) as e:
        # RequestException covers connection/timeout/HTTP-status errors;
        # ValueError covers a non-JSON response body. The original caught
        # bare Exception, which also hid programming errors.
        print(f"Error fetching from {host}: {e}")
        return []
|
||||
|
||||
def audit():
    """Cross-check PPP secrets on each router against the billing database.

    For every configured router, prints the usernames that exist on the
    router but have no matching `user_mikrotik` entry in billing. This is a
    read-only reporting tool; it never modifies routers or the database.
    """
    # 1. Load Billing Data
    print("--- Loading Billing Data ---")
    config_path = os.path.join(os.path.dirname(__file__), 'config.json')
    with open(config_path, 'r') as f:
        config = json.load(f)

    # NOTE(review): this expects a top-level 'billing_databases' key (the
    # layout of config.json.bak); the current config.json nests billing
    # settings under 'isps' -- confirm which format is actually deployed.
    billing = BillingDatabase(config['billing_databases'])

    # Every user present in the current billing snapshot counts as
    # "registered". If the snapshot/cache is stale the audit may be wrong.
    res = billing.search_customers("", limit=10000)  # empty query = fetch all
    if not res['success']:
        print(f"Failed to load billing data: {res.get('error')}")
        return

    billing_users = {c.get('user_mikrotik') for c in res['customers'] if c.get('user_mikrotik')}
    print(f"Loaded {len(billing_users)} users from Billing.")

    # 2. Define Routers
    # SECURITY: credentials are hardcoded here (and duplicated in
    # config.json) -- they should be read from the config file instead.
    routers = [
        {
            "name": "router-dimensi-dell",
            "host": "103.138.63.178",
            "port": 80,
            "user": "chatbot",
            "pass": "K0s0ng11@2026"
        },
        {
            "name": "ccr1036",
            "host": "103.138.63.184",
            "port": 80,
            "user": "chatbot",
            "pass": "K0s0ng11@2026"
        }
    ]

    # 3. Audit Each Router
    for r in routers:
        print(f"\n--- Auditing {r['name']} ({r['host']}) ---")
        secrets = fetch_router_secrets(r['host'], r['port'], r['user'], r['pass'])
        print(f"Found {len(secrets)} PPP secrets on router.")

        # Index secrets by name once (O(n)) instead of re-scanning the whole
        # list via next(...) for every unregistered user (was O(n^2)); this
        # also avoids a KeyError on secrets that lack a 'name' field, which
        # the old lookup (x['name']) could raise even though the filter loop
        # used .get(). Duplicate names collapse to a single report line.
        secrets_by_name = {s.get('name'): s for s in secrets if s.get('name')}
        unregistered = [n for n in secrets_by_name if n not in billing_users]

        if unregistered:
            print(f"⚠️ FOUND {len(unregistered)} UNREGISTERED USERS (Exist on Router but NOT in Billing):")
            for u in sorted(unregistered):
                profile = secrets_by_name[u].get('profile', '?')
                print(f" - {u} (Profile: {profile})")
        else:
            print("✅ All users on this router are registered in billing.")
|
||||
|
||||
# Entry point: run the audit when executed as a script (not on import).
if __name__ == "__main__":
    audit()
|
||||
27
config.example.json
Normal file
27
config.example.json
Normal file
@@ -0,0 +1,27 @@
|
||||
{
|
||||
"vultr": {
|
||||
"api_key": "your_vultr_api_key_here"
|
||||
},
|
||||
"server": {
|
||||
"host": "127.0.0.1",
|
||||
"port": 8000,
|
||||
"log_level": "info"
|
||||
},
|
||||
"billing_databases": {
|
||||
"1": {
|
||||
"alias": "Primary Server",
|
||||
"host": "localhost",
|
||||
"user": "root",
|
||||
"pass": "password",
|
||||
"name": "billing_db",
|
||||
"port": 3306
|
||||
},
|
||||
"2": {
|
||||
"alias": "Backup Server",
|
||||
"host": "remote.host.com",
|
||||
"user": "backup_user",
|
||||
"pass": "backup_pass",
|
||||
"name": "billing_backup"
|
||||
}
|
||||
}
|
||||
}
|
||||
50
config.json
Normal file
50
config.json
Normal file
@@ -0,0 +1,50 @@
|
||||
{
|
||||
"isps": {
|
||||
"dimensi": {
|
||||
"alias": "Dimensi Media Solusi",
|
||||
"billing": {
|
||||
"host": "103.138.63.188",
|
||||
"user": "chatbot",
|
||||
"pass": "afSXzFn_tLlve(@H",
|
||||
"name": "billinggold"
|
||||
},
|
||||
"routers": {
|
||||
"router-dimensi-dell": {
|
||||
"host": "103.138.63.178",
|
||||
"port": 80,
|
||||
"user": "chatbot",
|
||||
"pass": "K0s0ng11@2026"
|
||||
},
|
||||
"ccr1036": {
|
||||
"host": "103.138.63.184",
|
||||
"port": 80,
|
||||
"user": "chatbot",
|
||||
"pass": "K0s0ng11@2026"
|
||||
}
|
||||
}
|
||||
},
|
||||
"smc": {
|
||||
"alias": "SMC",
|
||||
"billing": {
|
||||
"host": "139.180.190.239",
|
||||
"user": "chatbot",
|
||||
"pass": "afSXzFn_tLlve(@H",
|
||||
"name": "smc"
|
||||
},
|
||||
"routers": {
|
||||
"router-smc": {
|
||||
"host": "103.138.63.183",
|
||||
"port": 81,
|
||||
"user": "chatbot",
|
||||
"pass": "K0s0ng11@2026"
|
||||
},
|
||||
"router-smc-lb": {
|
||||
"host": "103.138.63.183",
|
||||
"port": 80,
|
||||
"user": "chatbot",
|
||||
"pass": "K0s0ng11@2026"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
25
config.json.bak
Normal file
25
config.json.bak
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"billing_databases": {
|
||||
"dimensi": {
|
||||
"alias": "Dimensi Media Solusi",
|
||||
"host": "103.138.63.188",
|
||||
"user": "chatbot",
|
||||
"pass": "afSXzFn_tLlve(@H",
|
||||
"name": "billinggold"
|
||||
}
|
||||
},
|
||||
"routers": {
|
||||
"router-dimensi-dell": {
|
||||
"host": "103.138.63.178",
|
||||
"port": 80,
|
||||
"user": "chatbot",
|
||||
"pass": "K0s0ng11@2026"
|
||||
},
|
||||
"ccr1036": {
|
||||
"host": "103.138.63.184",
|
||||
"port": 80,
|
||||
"user": "chatbot",
|
||||
"pass": "K0s0ng11@2026"
|
||||
}
|
||||
}
|
||||
}
|
||||
80
not_used/test_billing_tools.py
Normal file
80
not_used/test_billing_tools.py
Normal file
@@ -0,0 +1,80 @@
|
||||
#!/usr/bin/env python3
"""
Test Billing Tools
"""

import sys
import os
from dotenv import load_dotenv

# Add src to path
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

# Dummy credentials so the module can be imported and exercised without a
# reachable database. Covers both the numbered and the legacy variable format.
_DUMMY_ENV = {
    'BILLING_DB_1_HOST': 'localhost',
    'BILLING_DB_1_USER': 'test',
    'BILLING_DB_1_PASS': 'test',
    'BILLING_DB_1_NAME': 'testdb',
    # Legacy (un-numbered) format
    'BILLING_DB_HOST': 'legacyhost',
    'BILLING_DB_USER': 'legacyuser',
    'BILLING_DB_PASS': 'legacypass',
    'BILLING_DB_NAME': 'legacydb',
}
os.environ.update(_DUMMY_ENV)

from src.vultr_mcp.billing import BillingDatabase


def test_billing_module():
    """Smoke-test BillingDatabase; connection failures are expected with dummy creds."""
    print("Testing BillingDatabase module...")

    try:
        billing = BillingDatabase()

        # Server discovery
        servers = billing.list_servers()
        print(f"Loaded servers: {len(servers)}")
        for server in servers:
            print(f"  Server {server['server_id']}: {server['host']} ({server['database']})")

        # Per-server config lookup
        config1 = billing.get_server_config('1')
        print(f"\nServer 1 config: {config1['host'] if config1 else 'None'}")

        # Server 2 should not exist
        config2 = billing.get_server_config('2')
        print(f"Server 2 config: {'Exists' if config2 else 'None'}")

        # Live connection check — expected to fail gracefully
        print("\nTesting connection check (will fail due to dummy credentials):")
        result = billing.check_connection('1')
        print(f"Success: {result.get('success', False)}")
        if not result.get('success'):
            print(f"Error: {result.get('error', 'Unknown')}")
            if result.get('hint'):
                print(f"Hint: {result['hint']}")

        # Search — expected to fail gracefully as well
        print("\nTesting search customers (will fail):")
        search_result = billing.search_customers('test', '1', 5, 0)
        print(f"Success: {search_result.get('success', False)}")

        # Phone normalization is pure and should always work
        print("\nTesting phone normalization:")
        for num in ('62812345678', '0812345678', '62812345678@c.us'):
            normalized = billing._normalize_phone_number(num)
            print(f"  {num} -> {normalized}")

        print("\n✅ Billing module tests completed (expected failures due to dummy credentials)")
        return True

    except Exception as e:
        print(f"❌ Error testing billing module: {e}")
        import traceback
        traceback.print_exc()
        return False


if __name__ == '__main__':
    sys.exit(0 if test_billing_module() else 1)
||||
4
requirements.txt
Normal file
4
requirements.txt
Normal file
@@ -0,0 +1,4 @@
|
||||
mcp
|
||||
pymysql
|
||||
requests
|
||||
python-dotenv
|
||||
5
run_server.sh
Executable file
5
run_server.sh
Executable file
@@ -0,0 +1,5 @@
|
||||
#!/bin/bash
# Launch the MCP billing server from the project root, regardless of the
# caller's working directory.

# Abort if we cannot enter the script's own directory.
cd "$(dirname "$0")" || exit 1

# Use the project-local virtual environment.
source venv/bin/activate

# Quoted so paths containing spaces do not word-split.
export PYTHONPATH="$PYTHONPATH:$(pwd)"

# exec replaces the shell, so signals (SIGTERM etc.) reach Python directly.
exec python3 src/server.py
||||
BIN
src/__pycache__/billing.cpython-311.pyc
Normal file
BIN
src/__pycache__/billing.cpython-311.pyc
Normal file
Binary file not shown.
BIN
src/__pycache__/server.cpython-311.pyc
Normal file
BIN
src/__pycache__/server.cpython-311.pyc
Normal file
Binary file not shown.
385
src/billing.py
Normal file
385
src/billing.py
Normal file
@@ -0,0 +1,385 @@
|
||||
"""
|
||||
Billing Database Module for MCP Server (with Snapshot Caching)
|
||||
Provides tools to query customer billing databases with multi-server support.
|
||||
Data is read from a local snapshot for performance; use refresh_snapshot() to update.
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
from datetime import datetime
|
||||
import pymysql
|
||||
from pymysql import MySQLError
|
||||
from typing import Dict, List, Any, Optional, Union
|
||||
|
||||
# --- Constants ---
# Snapshot files live in a dedicated, git-ignored cache directory located
# two levels above this module (i.e. the repository root's .cache/).
_PACKAGE_DIR = os.path.dirname(__file__)
CACHE_DIR = os.path.join(_PACKAGE_DIR, '..', '..', '.cache')
CACHE_FILE_PREFIX = "billing_snapshot_"

# Create the cache directory eagerly so later snapshot writes never fail on
# a missing parent directory.
os.makedirs(CACHE_DIR, exist_ok=True)
||||
|
||||
|
||||
class BillingDatabase:
    """Billing database client with multi-server support and snapshot caching.

    Read paths (search / details / summary) are served from a local JSON
    snapshot for performance; only :meth:`refresh_snapshot` and
    :meth:`check_connection` open a live MySQL connection.
    """

    def __init__(self, db_configs: Dict[str, Dict[str, str]]):
        """
        Initialize with database configurations.

        Args:
            db_configs: Dictionary of server configurations from config.json,
                keyed by server id. Entries missing any of host/user/pass/name
                are skipped with a warning.
        """
        self.servers = self._load_servers_from_config(db_configs)

    def _get_cache_path(self, server_id: str) -> str:
        """Return the snapshot file path for *server_id*."""
        return os.path.join(CACHE_DIR, f"{CACHE_FILE_PREFIX}{server_id}.json")

    def _load_servers_from_config(self, db_configs: Dict) -> Dict[str, Dict[str, Any]]:
        """Load and validate server configurations from a dictionary.

        Invalid entries are skipped (with a warning) rather than raising, so
        one bad config block cannot take down the whole server.
        """
        servers: Dict[str, Dict[str, Any]] = {}
        if not isinstance(db_configs, dict):
            return {}

        for server_id, config in db_configs.items():
            required_keys = ['host', 'user', 'pass', 'name']
            if not all(key in config for key in required_keys):
                print(f"Warning: Skipping server '{server_id}' due to missing configuration.")
                continue

            servers[server_id] = {
                'host': config['host'],
                'user': config['user'],
                'password': config['pass'],
                'database': config['name'],
                'alias': config.get('alias', f"Server {server_id}"),
                'connect_timeout': config.get('connect_timeout', 10),
                'port': config.get('port', 3306)
            }
        return servers

    def _resolve_server_id(self, server_id: str) -> str:
        """
        Resolve the server ID.

        If 'server_id' exists, return it. If it is not found but exactly one
        server is configured, fall back to that one (robust single-server
        setups). If the default "1" is requested but absent, fall back to the
        first configured server. Otherwise return the id unchanged and let the
        caller report the miss.
        """
        if server_id in self.servers:
            return server_id

        # Single-server fallback: any unknown id maps to the only server.
        if len(self.servers) == 1:
            return list(self.servers.keys())[0]

        # Default "1" requested but not configured: use the first provider.
        if server_id == "1" and self.servers:
            return list(self.servers.keys())[0]

        return server_id

    def get_server_config(self, server_id: str = "1") -> Optional[Dict[str, Any]]:
        """Get configuration for a specific server (after id resolution)."""
        resolved_id = self._resolve_server_id(server_id)
        return self.servers.get(resolved_id)

    def list_servers(self) -> List[Dict[str, Any]]:
        """List all configured database servers and their cache status."""
        result = []
        for server_id, config in self.servers.items():
            cache_path = self._get_cache_path(server_id)
            cache_status = "Not created"
            last_updated = "N/A"
            if os.path.exists(cache_path):
                try:
                    with open(cache_path, 'r', encoding='utf-8') as f:
                        cache_data = json.load(f)
                    record_count = len(cache_data.get('customers', []))
                    timestamp = cache_data.get('meta', {}).get('timestamp', 0)
                    last_updated = datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')
                    cache_status = f"Ready ({record_count} records)"
                except (json.JSONDecodeError, KeyError):
                    cache_status = "Corrupted"

            result.append({
                'server_id': server_id,
                'alias': config.get('alias', f"Server {server_id}"),
                'host': config['host'],
                'database': config['database'],
                'cache_status': cache_status,
                'cache_last_updated': last_updated
            })
        return result

    def _get_connection(self, server_id: str = "1"):
        """Create a live pymysql connection (DictCursor, utf8mb4).

        Raises:
            ValueError: if the server id cannot be resolved to a config.
        """
        resolved_id = self._resolve_server_id(server_id)
        config = self.get_server_config(resolved_id)
        if not config:
            available = list(self.servers.keys())
            raise ValueError(f"Server '{server_id}' not found. Available servers: {available}")

        return pymysql.connect(
            host=config['host'],
            user=config['user'],
            password=config['password'],
            database=config['database'],
            port=config.get('port', 3306),
            connect_timeout=config.get('connect_timeout', 10),
            charset='utf8mb4',
            cursorclass=pymysql.cursors.DictCursor
        )

    def refresh_snapshot(self, server_id: str = "1") -> Dict[str, Any]:
        """
        Fetch all customer data from the database and save it to a local snapshot.
        This is a potentially long-running operation.

        Args:
            server_id: The ID of the server to snapshot.

        Returns:
            A dictionary with the result of the operation
            ({'success': bool, ...}); never raises for DB/IO errors.
        """
        resolved_id = self._resolve_server_id(server_id)
        try:
            connection = self._get_connection(resolved_id)
            try:
                with connection.cursor() as cursor:
                    # Comprehensive query to get all necessary data in one go
                    sql = """
                        SELECT
                            c.*,
                            pi.name as package_name,
                            pi.price as package_price,
                            pi.description as package_description
                        FROM customer c
                        LEFT JOIN services s ON c.no_services = s.no_services
                        LEFT JOIN package_item pi ON s.item_id = pi.p_item_id
                    """
                    cursor.execute(sql)
                    customers = cursor.fetchall()

                # Create snapshot data structure.
                snapshot_data = {
                    "meta": {
                        # Store the *resolved* id so meta matches the cache
                        # filename (previously the raw requested id was saved).
                        "server_id": resolved_id,
                        "timestamp": datetime.now().timestamp(),
                        "customer_count": len(customers)
                    },
                    "customers": customers
                }

                # Save to cache file
                cache_path = self._get_cache_path(resolved_id)
                with open(cache_path, 'w', encoding='utf-8') as f:
                    json.dump(snapshot_data, f, indent=2, default=str)  # Use default=str for dates

                return {
                    "success": True,
                    "message": f"Snapshot for server '{resolved_id}' refreshed successfully.",
                    "customer_count": len(customers)
                }
            finally:
                connection.close()
        except (MySQLError, ValueError, IOError) as e:
            return {"success": False, "error": str(e)}

    def _load_snapshot(self, server_id: str) -> List[Dict[str, Any]]:
        """Load customer data from the local snapshot.

        Raises:
            FileNotFoundError: if no snapshot exists yet for this server.
        """
        resolved_id = self._resolve_server_id(server_id)
        cache_path = self._get_cache_path(resolved_id)
        if not os.path.exists(cache_path):
            raise FileNotFoundError(f"Snapshot for server '{server_id}' not found. "
                                    f"Please run 'billing_refresh_snapshot' first.")

        with open(cache_path, 'r', encoding='utf-8') as f:
            data = json.load(f)
        return data.get('customers', [])

    def search_customers(
        self,
        search_query: str = "",
        server_id: str = "1",
        limit: int = 50,
        offset: int = 0
    ) -> Dict[str, Any]:
        """Search customers from the local snapshot.

        Query tokens are ANDed together; a ``field:value`` token matches that
        field exactly, plain tokens substring-match name/no_wa/address/
        user_profile. Returns a paginated result dict; errors are reported as
        {'success': False, 'error': ...}.
        """
        resolved_id = self._resolve_server_id(server_id)
        try:
            customers = self._load_snapshot(resolved_id)

            filtered_customers = []
            if search_query:
                # 1. Preprocess: Handle "natural" separation like "router 1" -> "router:1"
                #    We target specific known keys to avoid breaking names.
                search_query_proc = search_query
                natural_keys = r'(router|mitra|status|profile|type|action)'
                # rf-string: keeps \s a literal regex escape. (A plain f-string
                # here emitted an invalid "\s" escape — SyntaxWarning, and a
                # future SyntaxError.)
                search_query_proc = re.sub(rf'{natural_keys}\s+([a-zA-Z0-9_]+)', r'\1:\2', search_query_proc, flags=re.IGNORECASE)

                # 2. Preprocess: Remove common stopwords that might break "AND" search
                #    e.g. "Budi di router 1" -> "Budi router:1" (removing "di")
                stopwords = r'\b(di|pada|in|at|with|by)\b'
                search_query_proc = re.sub(stopwords, '', search_query_proc, flags=re.IGNORECASE)

                # Split query into tokens by space (split() also swallows the
                # double spaces left behind by stopword removal).
                tokens = search_query_proc.split()

                for cust in customers:
                    match_all = True

                    for token in tokens:
                        if ':' in token:
                            # field:value syntax — strict, case-insensitive equality
                            key, val = [p.strip() for p in token.split(':', 1)]
                            val_lower = val.lower()
                            field_val = str(cust.get(key, '')).lower()

                            if field_val != val_lower:
                                match_all = False
                                break
                        else:
                            # Generic text search across the common fields
                            token_lower = token.lower()
                            if not (token_lower in str(cust.get('name', '')).lower() or
                                    token_lower in str(cust.get('no_wa', '')).lower() or
                                    token_lower in str(cust.get('address', '')).lower() or
                                    token_lower in str(cust.get('user_profile', '')).lower()):
                                match_all = False
                                break

                    if match_all:
                        filtered_customers.append(cust)
            else:
                # Empty query lists everything (caller paginates via limit/offset).
                filtered_customers = customers

            total = len(filtered_customers)
            paginated_results = filtered_customers[offset : offset + limit]

            return {
                'success': True,
                'source': 'snapshot',
                'server_id': resolved_id,
                'search_query': search_query,
                'total': total,
                'limit': limit,
                'offset': offset,
                'customers': paginated_results
            }
        except (FileNotFoundError, json.JSONDecodeError, KeyError) as e:
            return {'success': False, 'error': str(e)}

    def get_customer_details(
        self,
        customer_id: Optional[str] = None,
        phone_number: Optional[str] = None,
        server_id: str = "1"
    ) -> Dict[str, Any]:
        """Get detailed customer information from the local snapshot.

        Looks up by ``no_services`` (customer_id) or by normalized WhatsApp
        number; exactly one of the two identifiers must be supplied.
        """
        resolved_id = self._resolve_server_id(server_id)
        if not customer_id and not phone_number:
            return {'success': False, 'error': 'Either customer_id or phone_number must be provided'}

        try:
            customers = self._load_snapshot(resolved_id)

            if customer_id:
                for cust in customers:
                    if str(cust.get('no_services')) == customer_id:
                        return {'success': True, 'source': 'snapshot', 'customer': cust}

            elif phone_number:
                # Match against all format variants (62..., 0..., @c.us suffix).
                search_numbers = self._normalize_phone_number(phone_number)
                for cust in customers:
                    if cust.get('no_wa') in search_numbers:
                        return {'success': True, 'source': 'snapshot', 'customer': cust}

            return {'success': False, 'error': 'Customer not found in snapshot'}
        except (FileNotFoundError, json.JSONDecodeError, KeyError) as e:
            return {'success': False, 'error': str(e)}

    def _normalize_phone_number(self, phone_number: Optional[str]) -> List[str]:
        """Normalize phone number for search with Indonesian format variations.

        Strips a WhatsApp '@c.us' suffix and returns the number plus its
        62-prefix/0-prefix twin, de-duplicated, preserving order.
        """
        if not phone_number:
            return []

        p = phone_number.replace('@c.us', '')
        search_numbers = [p]
        if p.startswith('62'):
            search_numbers.append('0' + p[2:])
        elif p.startswith('0'):
            search_numbers.append('62' + p[1:])

        return list(dict.fromkeys(search_numbers))  # Unique list

    def get_customer_summary(self, server_id: str = "1") -> Dict[str, Any]:
        """Get customer statistics summary from the local snapshot.

        Currently reports the total count and the per-status distribution.
        NOTE(review): a "recent customers" metric was sketched here but the
        snapshot only carries a day-of-month due_date, not a full timestamp —
        it needs a proper created_at field before it can be implemented.
        """
        resolved_id = self._resolve_server_id(server_id)
        try:
            customers = self._load_snapshot(resolved_id)

            total = len(customers)
            status_distribution: Dict[str, int] = {}

            for cust in customers:
                status = cust.get('c_status', 'Unknown')
                status_distribution[status] = status_distribution.get(status, 0) + 1

            # Format the distribution as a list sorted by count, with percentages.
            status_dist_list = []
            for status, count in sorted(status_distribution.items(), key=lambda item: item[1], reverse=True):
                percentage = round((count / total) * 100, 2) if total > 0 else 0
                status_dist_list.append({'c_status': status, 'count': count, 'percentage': percentage})

            return {
                'success': True,
                'source': 'snapshot',
                'server_id': resolved_id,
                'total_customers': total,
                'status_distribution': status_dist_list
            }
        except (FileNotFoundError, json.JSONDecodeError, KeyError) as e:
            return {'success': False, 'error': str(e)}

    def check_connection(self, server_id: str = "1") -> Dict[str, Any]:
        """
        Perform a LIVE check of the database connection, bypassing the cache.
        Returns server version and customer count on success.
        """
        resolved_id = self._resolve_server_id(server_id)
        try:
            config = self.get_server_config(resolved_id)
            if not config:
                return {'success': False, 'error': f"Server '{resolved_id}' (requested: '{server_id}') not configured"}

            connection = self._get_connection(resolved_id)
            try:
                with connection.cursor() as cursor:
                    cursor.execute('SELECT VERSION() as version')
                    version_info = cursor.fetchone()
                    cursor.execute('SELECT COUNT(*) as customer_count FROM customer')
                    count_info = cursor.fetchone()
                    return {
                        'success': True,
                        'message': 'Live connection to database was successful.',
                        'host': config['host'],
                        'database': config['database'],
                        'version': version_info['version'] if version_info else 'N/A',
                        'customer_count': count_info['customer_count'] if count_info else 0
                    }
            finally:
                connection.close()
        except MySQLError as e:
            return {'success': False, 'error': f"Live connection failed: {e}"}
||||
317
src/server.py
Normal file
317
src/server.py
Normal file
@@ -0,0 +1,317 @@
|
||||
import asyncio
|
||||
import os
|
||||
import json
|
||||
import traceback
|
||||
from typing import Any, Dict, List, Optional
|
||||
import requests
|
||||
from requests.auth import HTTPBasicAuth
|
||||
|
||||
from mcp.server import Server
|
||||
from mcp.server.stdio import stdio_server
|
||||
from mcp.types import Tool, TextContent, ImageContent, EmbeddedResource
|
||||
from dotenv import load_dotenv
|
||||
|
||||
# Add current directory to path to ensure imports work if run from everywhere
|
||||
import sys
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from src.billing import BillingDatabase
|
||||
|
||||
# Load environment variables from a .env file, if present.
load_dotenv()

# Initialize the MCP server instance that exposes the billing tools.
app = Server("billing-mcp")

# Global instance, created lazily by get_billing_db() on first tool call.
billing_db: Optional[BillingDatabase] = None
# Parsed contents of config.json; populated by load_config().
config: Dict[str, Any] = {}

# Map to store which router belongs to which ISP (Billing DB Server ID).
# Keyed by BOTH router name and router host IP for flexible lookup.
# Format: { "router_host_or_name": "isp_server_id" }
router_isp_map: Dict[str, str] = {}
# Map to store resolved router config (credentials/host/port), same dual keying.
# Format: { "router_host_or_name": { "host": "...", "port": ..., "user": "...", "pass": "..." } }
resolved_router_configs: Dict[str, Dict] = {}
|
||||
|
||||
def load_config():
    """Load config.json from the project root and build the router lookup maps.

    Populates the module-level globals:
      - ``config``: the parsed JSON document;
      - ``router_isp_map``: router name AND router host -> ISP id;
      - ``resolved_router_configs``: router name AND router host -> raw router config.
    A missing config file only logs a warning to stderr (maps stay empty).
    """
    global config, router_isp_map, resolved_router_configs
    # config.json lives one directory above src/.
    config_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'config.json')
    if os.path.exists(config_path):
        with open(config_path, 'r') as f:
            config = json.load(f)

        # Parse ISP configs to build router map
        if 'isps' in config:
            for isp_id, isp_data in config['isps'].items():
                routers = isp_data.get('routers', {})
                for r_name, r_conf in routers.items():
                    # Map Name -> ISP ID
                    router_isp_map[r_name] = isp_id
                    # Map IP -> ISP ID (so tools accept either identifier)
                    if 'host' in r_conf:
                        router_isp_map[r_conf['host']] = isp_id

                    # Store resolved config for easy lookup, under both keys
                    resolved_router_configs[r_name] = r_conf
                    if 'host' in r_conf:
                        resolved_router_configs[r_conf['host']] = r_conf
    else:
        print(f"Warning: Config file not found at {config_path}", file=sys.stderr)
|
||||
|
||||
def get_billing_db() -> BillingDatabase:
    """Return the shared BillingDatabase, creating it (and loading config) on first use.

    Supports two config layouts: the current nested 'isps' structure, which is
    flattened to { isp_id: billing_config }, and the legacy flat
    'billing_databases' mapping.
    """
    global billing_db
    if billing_db is None:
        load_config()

        # Prepare configs for BillingDatabase
        db_configs = {}

        if 'isps' in config:
            # Flatten ISP structure for BillingDatabase:
            # BillingDatabase expects { "server_id": { config... } }
            for isp_id, isp_data in config['isps'].items():
                if 'billing' in isp_data:
                    # Inherit the ISP-level alias when the billing block has none
                    billing_conf = isp_data['billing']
                    if 'alias' not in billing_conf and 'alias' in isp_data:
                        billing_conf['alias'] = isp_data['alias']
                    db_configs[isp_id] = billing_conf
        elif 'billing_databases' in config:
            # Legacy fallback
            db_configs = config['billing_databases']

        billing_db = BillingDatabase(db_configs)

    return billing_db
|
||||
|
||||
@app.list_tools()
async def list_tools() -> List[Tool]:
    """Advertise the MCP tools exposed by this server.

    Fix: the original had a stray extra closing parenthesis after the
    'audit_router_users' Tool definition, which was a SyntaxError.
    """
    return [
        Tool(
            name="search_customers",
            description="Search for customers in the billing database. Returns specific customer details only.",
            inputSchema={
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "Search query (User ID, Name, or IP address). Leave empty to list all (use with limit)."
                    },
                    "limit": {
                        "type": "integer",
                        "description": "Maximum number of results to return. Default is 5.",
                        "default": 5
                    },
                    "isp_name": {
                        "type": "string",
                        "description": "Optional: Specific ISP/Provider name to search (e.g. 'dimensi'). If omitted, defaults to first available."
                    }
                },
                "required": ["query"]
            }
        ),
        Tool(
            name="refresh_billing_snapshot",
            description="Force refresh of the local billing snapshot from the MySQL database.",
            inputSchema={
                "type": "object",
                "properties": {
                    "isp_name": {
                        "type": "string",
                        "description": "Optional: Specific ISP/Provider name to refresh. If omitted, refreshes default."
                    }
                },
            }
        ),
        Tool(
            name="audit_router_users",
            description="Audit a MikroTik router to find users that are NOT in the billing database. Credentials will be auto-loaded from config if not provided.",
            inputSchema={
                "type": "object",
                "properties": {
                    "router_host": {
                        "type": "string",
                        "description": "IP address or Name of the router (e.g. 'ccr1036')"
                    },
                    "router_port": {
                        "type": "integer",
                        "description": "Port of the router API (default 80)",
                        "default": 80
                    },
                    "router_user": {
                        "type": "string",
                        "description": "Username for router (optional if configured)"
                    },
                    "router_pass": {
                        "type": "string",
                        "description": "Password for router (optional if configured)"
                    }
                },
                "required": ["router_host"]
            }
        ),
        Tool(
            name="list_system_info",
            description="List all configured ISPs and their associated Routers.",
            inputSchema={
                "type": "object",
                "properties": {},
            }
        )
    ]
||||
|
||||
@app.call_tool()
async def call_tool(name: str, arguments: Any) -> List[TextContent | ImageContent | EmbeddedResource]:
    """Dispatch an MCP tool invocation to the matching billing/router handler.

    Raises:
        ValueError: if *name* does not match a known tool.
    """
    db = get_billing_db()

    if name == "search_customers":
        query = arguments.get("query", "")
        limit = arguments.get("limit", 5)
        isp_name = arguments.get("isp_name", "1")  # Default logic in simple mode

        # If isp_name is not provided, BillingDatabase resolves "1" to the
        # first configured server; we stick to single-server/default behavior
        # unless the caller names an ISP explicitly.
        result = db.search_customers(query, server_id=isp_name, limit=limit)

        if not result['success']:
            return [TextContent(type="text", text=f"Error searching ({isp_name}): {result.get('error')}")]

        customers = result.get('customers', [])
        if not customers:
            return [TextContent(type="text", text=f"No customers found matching '{query}'.")]

        # Format output: one pipe-separated summary line per customer
        output_lines = [f"Found {len(customers)} customers (ISP: {result.get('server_id')}):"]
        for c in customers:
            details = [
                f"User: {c.get('user_mikrotik', 'N/A')}",
                f"Name: {c.get('name', 'N/A')}",
                f"Status: {c.get('c_status', 'N/A')}",
                f"Address: {c.get('address', 'N/A')}",
                f"Packet: {c.get('user_profile', 'N/A')}"
            ]
            output_lines.append(" | ".join(details))

        return [TextContent(type="text", text="\n".join(output_lines))]

    elif name == "refresh_billing_snapshot":
        isp_name = arguments.get("isp_name", "1")
        result = db.refresh_snapshot(server_id=isp_name)
        # FIX: refresh_snapshot() returns a result dict, which is ALWAYS
        # truthy — the original `if result:` reported success even when the
        # refresh failed. Check the 'success' flag and surface the error.
        if result.get("success"):
            return [TextContent(type="text", text=f"Billing snapshot for '{isp_name}' refreshed successfully.")]
        else:
            return [TextContent(type="text", text=f"Failed to refresh billing snapshot: {result.get('error', 'Check logs.')}")]

    elif name == "audit_router_users":
        host_arg = arguments.get("router_host")
        port = arguments.get("router_port", 80)
        user = arguments.get("router_user")
        password = arguments.get("router_pass")

        # 0. Identify ISP/Provider context for this router
        target_host = host_arg
        isp_context = "1"  # Default

        # Lookup in map (keyed by both router name and host IP)
        if host_arg in router_isp_map:
            isp_context = router_isp_map[host_arg]

        # Auto-lookup credentials when not supplied by the caller
        if not user or not password:
            found_config = resolved_router_configs.get(host_arg)

            # Fallback check internal routers config (legacy flat layout)
            if not found_config and 'routers' in config:
                if host_arg in config['routers']:
                    found_config = config['routers'][host_arg]
                else:
                    for k, v in config['routers'].items():
                        if v.get('host') == host_arg:
                            found_config = v
                            break

            if found_config:
                user = found_config.get('user')
                password = found_config.get('pass')
                target_host = found_config.get('host', target_host)
                port = found_config.get('port', port)
            else:
                return [TextContent(type="text", text=f"Error: Credentials not provided and router '{host_arg}' not found in config.")]

        # 1. Fetch router PPP secrets via the MikroTik REST API
        url = f"http://{target_host}:{port}/rest/ppp/secret"
        try:
            resp = requests.get(url, auth=HTTPBasicAuth(user, password), timeout=10)
            resp.raise_for_status()
            secrets_data = resp.json()
            # The API may return a single object instead of a list
            router_secrets = secrets_data if isinstance(secrets_data, list) else [secrets_data] if secrets_data else []
        except Exception as e:
            return [TextContent(type="text", text=f"Error connecting to router {target_host}: {str(e)}")]

        # 2. Get Billing Users (FROM SPECIFIC ISP CONTEXT)
        #    Strict isolation: only compare against this ISP's billing DB.
        res = db.search_customers("", server_id=isp_context, limit=10000)

        if not res['success']:
            return [TextContent(type="text", text=f"Error loading billing data for ISP '{isp_context}': {res.get('error')}")]

        billing_users = {c.get('user_mikrotik') for c in res['customers'] if c.get('user_mikrotik')}

        # 3. Compare: any router secret not present in billing is unregistered
        unregistered = []
        for s in router_secrets:
            s_name = s.get('name')
            if s_name and s_name not in billing_users:
                unregistered.append(f"{s_name} (Profile: {s.get('profile', '?')})")

        if unregistered:
            return [TextContent(type="text", text=f"Found {len(unregistered)} Unregistered Users on {target_host} (ISP: {isp_context}):\n" + "\n".join(sorted(unregistered)))]
        else:
            return [TextContent(type="text", text=f"All {len(router_secrets)} users on router {target_host} (ISP: {isp_context}) are valid.")]

    elif name == "list_system_info":
        if 'isps' not in config:
            return [TextContent(type="text", text="No ISPs configured (Legacy mode or empty config).")]

        output = ["Computed System Configuration:", ""]

        for isp_id, data in config['isps'].items():
            alias = data.get('alias', isp_id)
            billing_host = data.get('billing', {}).get('host', 'N/A')
            billing_db_name = data.get('billing', {}).get('name', 'N/A')

            output.append(f"🌐 ISP: {alias} (ID: {isp_id})")
            output.append(f"   Using Billing DB: {billing_db_name} @ {billing_host}")

            routers = data.get('routers', {})
            if routers:
                output.append("   📡 Registered Routers:")
                for r_name, r_conf in routers.items():
                    host = r_conf.get('host', 'N/A')
                    port = r_conf.get('port', 80)
                    output.append(f"      - {r_name} ({host}:{port})")
            else:
                output.append("   ⚠️ No routers configured.")

            output.append("")  # Empty line separator

        return [TextContent(type="text", text="\n".join(output))]

    raise ValueError(f"Unknown tool: {name}")
|
||||
|
||||
async def main():
    """Serve MCP requests over the stdio transport until the client disconnects."""
    async with stdio_server() as (reader, writer):
        init_options = app.create_initialization_options()
        await app.run(reader, writer, init_options)


if __name__ == "__main__":
    asyncio.run(main())
|
||||
112
test_billing_real.py
Normal file
112
test_billing_real.py
Normal file
@@ -0,0 +1,112 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Test Billing Tools with real configuration from config.json
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import json
|
||||
|
||||
# Add src to path
|
||||
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
|
||||
|
||||
from src.billing import BillingDatabase
|
||||
|
||||
def load_real_config():
    """Return the parsed config.json next to this script, or None if absent."""
    path = os.path.join(os.path.dirname(__file__), 'config.json')
    if not os.path.exists(path):
        return None
    with open(path, 'r') as handle:
        return json.load(handle)
|
||||
|
||||
def test_billing_with_real_config():
    """Integration smoke-test against the real config.json.

    Loads the configured billing databases, checks a live connection, then
    exercises snapshot refresh, search and summary. Returns True only when
    the live connection succeeds.
    """
    print("Testing BillingDatabase with real configuration...")

    try:
        config = load_real_config()
        if not config:
            print("❌ config.json not found")
            return False

        # Flatten config into { server_id: billing_conf }, mirroring server.py
        db_configs = {}
        if 'isps' in config:
            for isp_id, isp_data in config['isps'].items():
                if 'billing' in isp_data:
                    conf = isp_data['billing']
                    if 'alias' not in conf:
                        conf['alias'] = isp_data.get('alias', isp_id)
                    db_configs[isp_id] = conf
        elif 'billing_databases' in config:
            # Legacy flat layout
            db_configs = config.get("billing_databases", {})

        if not db_configs:
            print("❌ No billing configuration (isps or billing_databases) found in config.json")
            return False

        # Create instance
        billing = BillingDatabase(db_configs)

        # Test server loading
        servers = billing.list_servers()
        print(f"Loaded servers: {len(servers)}")
        for server in servers:
            print(f"  Server {server['server_id']}: {server['host']} ({server['database']})")

        if not servers:
            print("❌ No servers configured")
            return False

        # Test live connection to first server
        server_id = servers[0]['server_id']
        print(f"\nTesting connection to server {server_id}...")

        result = billing.check_connection(server_id)

        if result.get('success'):
            print(f"✅ Connection successful!")
            print(f"   Host: {result.get('host')}")
            print(f"   Database: {result.get('database')}")
            print(f"   Version: {result.get('version')}")
            print(f"   Customer count: {result.get('customer_count')}")

            # Search reads from the snapshot, so build/refresh it first.
            print("\nTesting search customers (first 5)...")
            # FIX: log message typo — was "Refusing snapshot first".
            print("  Refreshing snapshot first (may take a moment)...")
            billing.refresh_snapshot(server_id)

            search_result = billing.search_customers('', server_id, 5, 0)
            if search_result.get('success'):
                print(f"✅ Search successful: {len(search_result.get('customers', []))} customers")
                for i, customer in enumerate(search_result.get('customers', [])[:3]):  # Show first 3
                    print(f"  {i+1}. {customer.get('name', 'N/A')} - {customer.get('no_wa', 'N/A')}")
            else:
                print(f"❌ Search failed: {search_result.get('error', 'Unknown error')}")

            # Test summary
            print("\nTesting customer summary...")
            summary_result = billing.get_customer_summary(server_id)
            if summary_result.get('success'):
                print(f"✅ Summary successful:")
                print(f"   Total customers: {summary_result.get('total_customers', 0)}")
                if summary_result.get('status_distribution'):
                    for status in summary_result.get('status_distribution', [])[:5]:
                        print(f"   - {status.get('c_status', 'N/A')}: {status.get('count', 0)} ({status.get('percentage', 0)}%)")
            else:
                print(f"❌ Summary failed: {summary_result.get('error', 'Unknown error')}")

        else:
            print(f"❌ Connection failed: {result.get('error', 'Unknown error')}")

        return result.get('success', False)

    except Exception as e:
        print(f"❌ Error testing billing module: {e}")
        import traceback
        traceback.print_exc()
        return False


if __name__ == '__main__':
    success = test_billing_with_real_config()
    sys.exit(0 if success else 1)
|
||||
247
venv/bin/Activate.ps1
Normal file
247
venv/bin/Activate.ps1
Normal file
@@ -0,0 +1,247 @@
|
||||
<#
|
||||
.Synopsis
|
||||
Activate a Python virtual environment for the current PowerShell session.
|
||||
|
||||
.Description
|
||||
Pushes the python executable for a virtual environment to the front of the
|
||||
$Env:PATH environment variable and sets the prompt to signify that you are
|
||||
in a Python virtual environment. Makes use of the command line switches as
|
||||
well as the `pyvenv.cfg` file values present in the virtual environment.
|
||||
|
||||
.Parameter VenvDir
|
||||
Path to the directory that contains the virtual environment to activate. The
|
||||
default value for this is the parent of the directory that the Activate.ps1
|
||||
script is located within.
|
||||
|
||||
.Parameter Prompt
|
||||
The prompt prefix to display when this virtual environment is activated. By
|
||||
default, this prompt is the name of the virtual environment folder (VenvDir)
|
||||
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
|
||||
|
||||
.Example
|
||||
Activate.ps1
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Verbose
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and shows extra information about the activation as it executes.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
|
||||
Activates the Python virtual environment located in the specified location.
|
||||
|
||||
.Example
|
||||
Activate.ps1 -Prompt "MyPython"
|
||||
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||
and prefixes the current prompt with the specified string (surrounded in
|
||||
parentheses) while the virtual environment is active.
|
||||
|
||||
.Notes
|
||||
On Windows, it may be required to enable this Activate.ps1 script by setting the
|
||||
execution policy for the user. You can do this by issuing the following PowerShell
|
||||
command:
|
||||
|
||||
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
||||
|
||||
For more information on Execution Policies:
|
||||
https://go.microsoft.com/fwlink/?LinkID=135170
|
||||
|
||||
#>
|
||||
Param(
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$VenvDir,
|
||||
[Parameter(Mandatory = $false)]
|
||||
[String]
|
||||
$Prompt
|
||||
)
|
||||
|
||||
<# Function declarations --------------------------------------------------- #>
|
||||
|
||||
<#
|
||||
.Synopsis
|
||||
Remove all shell session elements added by the Activate script, including the
|
||||
addition of the virtual environment's Python executable from the beginning of
|
||||
the PATH variable.
|
||||
|
||||
.Parameter NonDestructive
|
||||
If present, do not remove this function from the global namespace for the
|
||||
session.
|
||||
|
||||
#>
|
||||
function global:deactivate ([switch]$NonDestructive) {
|
||||
# Revert to original values
|
||||
|
||||
# The prior prompt:
|
||||
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
|
||||
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
|
||||
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
|
||||
# The prior PYTHONHOME:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
}
|
||||
|
||||
# The prior PATH:
|
||||
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
|
||||
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
|
||||
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
|
||||
}
|
||||
|
||||
# Just remove the VIRTUAL_ENV altogether:
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV
|
||||
}
|
||||
|
||||
# Just remove VIRTUAL_ENV_PROMPT altogether.
|
||||
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
|
||||
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
|
||||
}
|
||||
|
||||
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
|
||||
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
|
||||
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
|
||||
}
|
||||
|
||||
# Leave deactivate function in the global namespace if requested:
|
||||
if (-not $NonDestructive) {
|
||||
Remove-Item -Path function:deactivate
|
||||
}
|
||||
}
|
||||
|
||||
<#
|
||||
.Description
|
||||
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
|
||||
given folder, and returns them in a map.
|
||||
|
||||
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
|
||||
two strings separated by `=` (with any amount of whitespace surrounding the =)
|
||||
then it is considered a `key = value` line. The left hand string is the key,
|
||||
the right hand is the value.
|
||||
|
||||
If the value starts with a `'` or a `"` then the first and last character is
|
||||
stripped from the value before being captured.
|
||||
|
||||
.Parameter ConfigDir
|
||||
Path to the directory that contains the `pyvenv.cfg` file.
|
||||
#>
|
||||
function Get-PyVenvConfig(
|
||||
[String]
|
||||
$ConfigDir
|
||||
) {
|
||||
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
|
||||
|
||||
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
|
||||
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
|
||||
|
||||
# An empty map will be returned if no config file is found.
|
||||
$pyvenvConfig = @{ }
|
||||
|
||||
if ($pyvenvConfigPath) {
|
||||
|
||||
Write-Verbose "File exists, parse `key = value` lines"
|
||||
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
|
||||
|
||||
$pyvenvConfigContent | ForEach-Object {
|
||||
$keyval = $PSItem -split "\s*=\s*", 2
|
||||
if ($keyval[0] -and $keyval[1]) {
|
||||
$val = $keyval[1]
|
||||
|
||||
# Remove extraneous quotations around a string value.
|
||||
if ("'""".Contains($val.Substring(0, 1))) {
|
||||
$val = $val.Substring(1, $val.Length - 2)
|
||||
}
|
||||
|
||||
$pyvenvConfig[$keyval[0]] = $val
|
||||
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
|
||||
}
|
||||
}
|
||||
}
|
||||
return $pyvenvConfig
|
||||
}
|
||||
|
||||
|
||||
<# Begin Activate script --------------------------------------------------- #>
|
||||
|
||||
# Determine the containing directory of this script
|
||||
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$VenvExecDir = Get-Item -Path $VenvExecPath
|
||||
|
||||
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
|
||||
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
|
||||
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
|
||||
|
||||
# Set values required in priority: CmdLine, ConfigFile, Default
|
||||
# First, get the location of the virtual environment, it might not be
|
||||
# VenvExecDir if specified on the command line.
|
||||
if ($VenvDir) {
|
||||
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
|
||||
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
|
||||
Write-Verbose "VenvDir=$VenvDir"
|
||||
}
|
||||
|
||||
# Next, read the `pyvenv.cfg` file to determine any required value such
|
||||
# as `prompt`.
|
||||
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
|
||||
|
||||
# Next, set the prompt from the command line, or the config file, or
|
||||
# just use the name of the virtual environment folder.
|
||||
if ($Prompt) {
|
||||
Write-Verbose "Prompt specified as argument, using '$Prompt'"
|
||||
}
|
||||
else {
|
||||
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
|
||||
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
|
||||
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
|
||||
$Prompt = $pyvenvCfg['prompt'];
|
||||
}
|
||||
else {
|
||||
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
|
||||
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
|
||||
$Prompt = Split-Path -Path $venvDir -Leaf
|
||||
}
|
||||
}
|
||||
|
||||
Write-Verbose "Prompt = '$Prompt'"
|
||||
Write-Verbose "VenvDir='$VenvDir'"
|
||||
|
||||
# Deactivate any currently active virtual environment, but leave the
|
||||
# deactivate function in place.
|
||||
deactivate -nondestructive
|
||||
|
||||
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
|
||||
# that there is an activated venv.
|
||||
$env:VIRTUAL_ENV = $VenvDir
|
||||
|
||||
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
|
||||
|
||||
Write-Verbose "Setting prompt to '$Prompt'"
|
||||
|
||||
# Set the prompt to include the env name
|
||||
# Make sure _OLD_VIRTUAL_PROMPT is global
|
||||
function global:_OLD_VIRTUAL_PROMPT { "" }
|
||||
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
|
||||
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
|
||||
|
||||
function global:prompt {
|
||||
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
|
||||
_OLD_VIRTUAL_PROMPT
|
||||
}
|
||||
$env:VIRTUAL_ENV_PROMPT = $Prompt
|
||||
}
|
||||
|
||||
# Clear PYTHONHOME
|
||||
if (Test-Path -Path Env:PYTHONHOME) {
|
||||
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
|
||||
Remove-Item -Path Env:PYTHONHOME
|
||||
}
|
||||
|
||||
# Add the venv to the PATH
|
||||
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
|
||||
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
|
||||
69
venv/bin/activate
Normal file
69
venv/bin/activate
Normal file
@@ -0,0 +1,69 @@
|
||||
# This file must be used with "source bin/activate" *from bash*
|
||||
# you cannot run it directly
|
||||
|
||||
deactivate () {
|
||||
# reset old environment variables
|
||||
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
|
||||
PATH="${_OLD_VIRTUAL_PATH:-}"
|
||||
export PATH
|
||||
unset _OLD_VIRTUAL_PATH
|
||||
fi
|
||||
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
|
||||
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
|
||||
export PYTHONHOME
|
||||
unset _OLD_VIRTUAL_PYTHONHOME
|
||||
fi
|
||||
|
||||
# This should detect bash and zsh, which have a hash command that must
|
||||
# be called to get it to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||
hash -r 2> /dev/null
|
||||
fi
|
||||
|
||||
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
|
||||
PS1="${_OLD_VIRTUAL_PS1:-}"
|
||||
export PS1
|
||||
unset _OLD_VIRTUAL_PS1
|
||||
fi
|
||||
|
||||
unset VIRTUAL_ENV
|
||||
unset VIRTUAL_ENV_PROMPT
|
||||
if [ ! "${1:-}" = "nondestructive" ] ; then
|
||||
# Self destruct!
|
||||
unset -f deactivate
|
||||
fi
|
||||
}
|
||||
|
||||
# unset irrelevant variables
|
||||
deactivate nondestructive
|
||||
|
||||
VIRTUAL_ENV=/home/wartana/myApp/billing-mcp/venv
|
||||
export VIRTUAL_ENV
|
||||
|
||||
_OLD_VIRTUAL_PATH="$PATH"
|
||||
PATH="$VIRTUAL_ENV/"bin":$PATH"
|
||||
export PATH
|
||||
|
||||
# unset PYTHONHOME if set
|
||||
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
|
||||
# could use `if (set -u; : $PYTHONHOME) ;` in bash
|
||||
if [ -n "${PYTHONHOME:-}" ] ; then
|
||||
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
|
||||
unset PYTHONHOME
|
||||
fi
|
||||
|
||||
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
|
||||
_OLD_VIRTUAL_PS1="${PS1:-}"
|
||||
PS1='(venv) '"${PS1:-}"
|
||||
export PS1
|
||||
VIRTUAL_ENV_PROMPT='(venv) '
|
||||
export VIRTUAL_ENV_PROMPT
|
||||
fi
|
||||
|
||||
# This should detect bash and zsh, which have a hash command that must
|
||||
# be called to get it to forget past commands. Without forgetting
|
||||
# past commands the $PATH changes we made may not be respected
|
||||
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||
hash -r 2> /dev/null
|
||||
fi
|
||||
26
venv/bin/activate.csh
Normal file
26
venv/bin/activate.csh
Normal file
@@ -0,0 +1,26 @@
|
||||
# This file must be used with "source bin/activate.csh" *from csh*.
|
||||
# You cannot run it directly.
|
||||
# Created by Davide Di Blasi <davidedb@gmail.com>.
|
||||
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
|
||||
|
||||
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
setenv VIRTUAL_ENV /home/wartana/myApp/billing-mcp/venv
|
||||
|
||||
set _OLD_VIRTUAL_PATH="$PATH"
|
||||
setenv PATH "$VIRTUAL_ENV/"bin":$PATH"
|
||||
|
||||
|
||||
set _OLD_VIRTUAL_PROMPT="$prompt"
|
||||
|
||||
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
|
||||
set prompt = '(venv) '"$prompt"
|
||||
setenv VIRTUAL_ENV_PROMPT '(venv) '
|
||||
endif
|
||||
|
||||
alias pydoc python -m pydoc
|
||||
|
||||
rehash
|
||||
69
venv/bin/activate.fish
Normal file
69
venv/bin/activate.fish
Normal file
@@ -0,0 +1,69 @@
|
||||
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
|
||||
# (https://fishshell.com/); you cannot run it directly.
|
||||
|
||||
function deactivate -d "Exit virtual environment and return to normal shell environment"
|
||||
# reset old environment variables
|
||||
if test -n "$_OLD_VIRTUAL_PATH"
|
||||
set -gx PATH $_OLD_VIRTUAL_PATH
|
||||
set -e _OLD_VIRTUAL_PATH
|
||||
end
|
||||
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
||||
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
|
||||
set -e _OLD_VIRTUAL_PYTHONHOME
|
||||
end
|
||||
|
||||
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
||||
set -e _OLD_FISH_PROMPT_OVERRIDE
|
||||
# prevents error when using nested fish instances (Issue #93858)
|
||||
if functions -q _old_fish_prompt
|
||||
functions -e fish_prompt
|
||||
functions -c _old_fish_prompt fish_prompt
|
||||
functions -e _old_fish_prompt
|
||||
end
|
||||
end
|
||||
|
||||
set -e VIRTUAL_ENV
|
||||
set -e VIRTUAL_ENV_PROMPT
|
||||
if test "$argv[1]" != "nondestructive"
|
||||
# Self-destruct!
|
||||
functions -e deactivate
|
||||
end
|
||||
end
|
||||
|
||||
# Unset irrelevant variables.
|
||||
deactivate nondestructive
|
||||
|
||||
set -gx VIRTUAL_ENV /home/wartana/myApp/billing-mcp/venv
|
||||
|
||||
set -gx _OLD_VIRTUAL_PATH $PATH
|
||||
set -gx PATH "$VIRTUAL_ENV/"bin $PATH
|
||||
|
||||
# Unset PYTHONHOME if set.
|
||||
if set -q PYTHONHOME
|
||||
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
||||
set -e PYTHONHOME
|
||||
end
|
||||
|
||||
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
||||
# fish uses a function instead of an env var to generate the prompt.
|
||||
|
||||
# Save the current fish_prompt function as the function _old_fish_prompt.
|
||||
functions -c fish_prompt _old_fish_prompt
|
||||
|
||||
# With the original prompt function renamed, we can override with our own.
|
||||
function fish_prompt
|
||||
# Save the return status of the last command.
|
||||
set -l old_status $status
|
||||
|
||||
# Output the venv prompt; color taken from the blue of the Python logo.
|
||||
printf "%s%s%s" (set_color 4B8BBE) '(venv) ' (set_color normal)
|
||||
|
||||
# Restore the return status of the previous command.
|
||||
echo "exit $old_status" | .
|
||||
# Output the original/"old" prompt.
|
||||
_old_fish_prompt
|
||||
end
|
||||
|
||||
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
||||
set -gx VIRTUAL_ENV_PROMPT '(venv) '
|
||||
end
|
||||
8
venv/bin/dotenv
Executable file
8
venv/bin/dotenv
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/wartana/myApp/billing-mcp/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from dotenv.__main__ import cli
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(cli())
|
||||
8
venv/bin/httpx
Executable file
8
venv/bin/httpx
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/wartana/myApp/billing-mcp/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from httpx import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/jsonschema
Executable file
8
venv/bin/jsonschema
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/wartana/myApp/billing-mcp/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from jsonschema.cli import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/mcp
Executable file
8
venv/bin/mcp
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/wartana/myApp/billing-mcp/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from mcp.cli import app
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(app())
|
||||
8
venv/bin/normalizer
Executable file
8
venv/bin/normalizer
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/wartana/myApp/billing-mcp/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from charset_normalizer.cli import cli_detect
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(cli_detect())
|
||||
8
venv/bin/pip
Executable file
8
venv/bin/pip
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/wartana/myApp/billing-mcp/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/pip3
Executable file
8
venv/bin/pip3
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/wartana/myApp/billing-mcp/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
8
venv/bin/pip3.11
Executable file
8
venv/bin/pip3.11
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/wartana/myApp/billing-mcp/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from pip._internal.cli.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
1
venv/bin/python
Symbolic link
1
venv/bin/python
Symbolic link
@@ -0,0 +1 @@
|
||||
python3
|
||||
1
venv/bin/python3
Symbolic link
1
venv/bin/python3
Symbolic link
@@ -0,0 +1 @@
|
||||
/usr/bin/python3
|
||||
1
venv/bin/python3.11
Symbolic link
1
venv/bin/python3.11
Symbolic link
@@ -0,0 +1 @@
|
||||
python3
|
||||
8
venv/bin/uvicorn
Executable file
8
venv/bin/uvicorn
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/home/wartana/myApp/billing-mcp/venv/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
import re
|
||||
import sys
|
||||
from uvicorn.main import main
|
||||
if __name__ == '__main__':
|
||||
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||
sys.exit(main())
|
||||
@@ -0,0 +1,7 @@
|
||||
Authors
|
||||
=======
|
||||
|
||||
``pyjwt`` is currently written and maintained by `Jose Padilla <https://github.com/jpadilla>`_.
|
||||
Originally written and maintained by `Jeff Lindsay <https://github.com/progrium>`_.
|
||||
|
||||
A full list of contributors can be found on GitHub’s `overview <https://github.com/jpadilla/pyjwt/graphs/contributors>`_.
|
||||
@@ -0,0 +1 @@
|
||||
pip
|
||||
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2015-2022 José Padilla
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
@@ -0,0 +1,106 @@
|
||||
Metadata-Version: 2.1
|
||||
Name: PyJWT
|
||||
Version: 2.10.1
|
||||
Summary: JSON Web Token implementation in Python
|
||||
Author-email: Jose Padilla <hello@jpadilla.com>
|
||||
License: MIT
|
||||
Project-URL: Homepage, https://github.com/jpadilla/pyjwt
|
||||
Keywords: json,jwt,security,signing,token,web
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: Natural Language :: English
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3 :: Only
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: 3.12
|
||||
Classifier: Programming Language :: Python :: 3.13
|
||||
Classifier: Topic :: Utilities
|
||||
Requires-Python: >=3.9
|
||||
Description-Content-Type: text/x-rst
|
||||
License-File: LICENSE
|
||||
License-File: AUTHORS.rst
|
||||
Provides-Extra: crypto
|
||||
Requires-Dist: cryptography>=3.4.0; extra == "crypto"
|
||||
Provides-Extra: dev
|
||||
Requires-Dist: coverage[toml]==5.0.4; extra == "dev"
|
||||
Requires-Dist: cryptography>=3.4.0; extra == "dev"
|
||||
Requires-Dist: pre-commit; extra == "dev"
|
||||
Requires-Dist: pytest<7.0.0,>=6.0.0; extra == "dev"
|
||||
Requires-Dist: sphinx; extra == "dev"
|
||||
Requires-Dist: sphinx-rtd-theme; extra == "dev"
|
||||
Requires-Dist: zope.interface; extra == "dev"
|
||||
Provides-Extra: docs
|
||||
Requires-Dist: sphinx; extra == "docs"
|
||||
Requires-Dist: sphinx-rtd-theme; extra == "docs"
|
||||
Requires-Dist: zope.interface; extra == "docs"
|
||||
Provides-Extra: tests
|
||||
Requires-Dist: coverage[toml]==5.0.4; extra == "tests"
|
||||
Requires-Dist: pytest<7.0.0,>=6.0.0; extra == "tests"
|
||||
|
||||
PyJWT
|
||||
=====
|
||||
|
||||
.. image:: https://github.com/jpadilla/pyjwt/workflows/CI/badge.svg
|
||||
:target: https://github.com/jpadilla/pyjwt/actions?query=workflow%3ACI
|
||||
|
||||
.. image:: https://img.shields.io/pypi/v/pyjwt.svg
|
||||
:target: https://pypi.python.org/pypi/pyjwt
|
||||
|
||||
.. image:: https://codecov.io/gh/jpadilla/pyjwt/branch/master/graph/badge.svg
|
||||
:target: https://codecov.io/gh/jpadilla/pyjwt
|
||||
|
||||
.. image:: https://readthedocs.org/projects/pyjwt/badge/?version=stable
|
||||
:target: https://pyjwt.readthedocs.io/en/stable/
|
||||
|
||||
A Python implementation of `RFC 7519 <https://tools.ietf.org/html/rfc7519>`_. Original implementation was written by `@progrium <https://github.com/progrium>`_.
|
||||
|
||||
Sponsor
|
||||
-------
|
||||
|
||||
.. |auth0-logo| image:: https://github.com/user-attachments/assets/ee98379e-ee76-4bcb-943a-e25c4ea6d174
|
||||
:width: 160px
|
||||
|
||||
+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| |auth0-logo| | If you want to quickly add secure token-based authentication to Python projects, feel free to check Auth0's Python SDK and free plan at `auth0.com/signup <https://auth0.com/signup?utm_source=external_sites&utm_medium=pyjwt&utm_campaign=devn_signup>`_. |
|
||||
+--------------+-----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
|
||||
Installing
|
||||
----------
|
||||
|
||||
Install with **pip**:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ pip install PyJWT
|
||||
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
.. code-block:: pycon
|
||||
|
||||
>>> import jwt
|
||||
>>> encoded = jwt.encode({"some": "payload"}, "secret", algorithm="HS256")
|
||||
>>> print(encoded)
|
||||
eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzb21lIjoicGF5bG9hZCJ9.4twFt5NiznN84AWoo1d7KO1T_yoc0Z6XOpOVswacPZg
|
||||
>>> jwt.decode(encoded, "secret", algorithms=["HS256"])
|
||||
{'some': 'payload'}
|
||||
|
||||
Documentation
|
||||
-------------
|
||||
|
||||
View the full docs online at https://pyjwt.readthedocs.io/en/stable/
|
||||
|
||||
|
||||
Tests
|
||||
-----
|
||||
|
||||
You can run tests from the project root after cloning with:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ tox
|
||||
@@ -0,0 +1,32 @@
|
||||
PyJWT-2.10.1.dist-info/AUTHORS.rst,sha256=klzkNGECnu2_VY7At89_xLBF3vUSDruXk3xwgUBxzwc,322
|
||||
PyJWT-2.10.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
PyJWT-2.10.1.dist-info/LICENSE,sha256=eXp6ICMdTEM-nxkR2xcx0GtYKLmPSZgZoDT3wPVvXOU,1085
|
||||
PyJWT-2.10.1.dist-info/METADATA,sha256=EkewF6D6KU8SGaaQzVYfxUUU1P_gs_dp1pYTkoYvAx8,3990
|
||||
PyJWT-2.10.1.dist-info/RECORD,,
|
||||
PyJWT-2.10.1.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
|
||||
PyJWT-2.10.1.dist-info/top_level.txt,sha256=RP5DHNyJbMq2ka0FmfTgoSaQzh7e3r5XuCWCO8a00k8,4
|
||||
jwt/__init__.py,sha256=VB2vFKuboTjcDGeZ8r-UqK_dz3NsQSQEqySSICby8Xg,1711
|
||||
jwt/__pycache__/__init__.cpython-311.pyc,,
|
||||
jwt/__pycache__/algorithms.cpython-311.pyc,,
|
||||
jwt/__pycache__/api_jwk.cpython-311.pyc,,
|
||||
jwt/__pycache__/api_jws.cpython-311.pyc,,
|
||||
jwt/__pycache__/api_jwt.cpython-311.pyc,,
|
||||
jwt/__pycache__/exceptions.cpython-311.pyc,,
|
||||
jwt/__pycache__/help.cpython-311.pyc,,
|
||||
jwt/__pycache__/jwk_set_cache.cpython-311.pyc,,
|
||||
jwt/__pycache__/jwks_client.cpython-311.pyc,,
|
||||
jwt/__pycache__/types.cpython-311.pyc,,
|
||||
jwt/__pycache__/utils.cpython-311.pyc,,
|
||||
jwt/__pycache__/warnings.cpython-311.pyc,,
|
||||
jwt/algorithms.py,sha256=cKr-XEioe0mBtqJMCaHEswqVOA1Z8Purt5Sb3Bi-5BE,30409
|
||||
jwt/api_jwk.py,sha256=6F1r7rmm8V5qEnBKA_xMjS9R7VoANe1_BL1oD2FrAjE,4451
|
||||
jwt/api_jws.py,sha256=aM8vzqQf6mRrAw7bRy-Moj_pjWsKSVQyYK896AfMjJU,11762
|
||||
jwt/api_jwt.py,sha256=OGT4hok1l5A6FH_KdcrU5g6u6EQ8B7em0r9kGM9SYgA,14512
|
||||
jwt/exceptions.py,sha256=bUIOJ-v9tjopTLS-FYOTc3kFx5WP5IZt7ksN_HE1G9Q,1211
|
||||
jwt/help.py,sha256=vFdNzjQoAch04XCMYpCkyB2blaqHAGAqQrtf9nSPkdk,1808
|
||||
jwt/jwk_set_cache.py,sha256=hBKmN-giU7-G37L_XKgc_OZu2ah4wdbj1ZNG_GkoSE8,959
|
||||
jwt/jwks_client.py,sha256=p9b-IbQqo2tEge9Zit3oSPBFNePqwho96VLbnUrHUWs,4259
|
||||
jwt/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
jwt/types.py,sha256=VnhGv_VFu5a7_mrPoSCB7HaNLrJdhM8Sq1sSfEg0gLU,99
|
||||
jwt/utils.py,sha256=hxOjvDBheBYhz-RIPiEz7Q88dSUSTMzEdKE_Ww2VdJw,3640
|
||||
jwt/warnings.py,sha256=50XWOnyNsIaqzUJTk6XHNiIDykiL763GYA92MjTKmok,59
|
||||
@@ -0,0 +1,5 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: setuptools (75.6.0)
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
jwt
|
||||
Binary file not shown.
BIN
venv/lib/python3.11/site-packages/_cffi_backend.cpython-311-x86_64-linux-gnu.so
Executable file
BIN
venv/lib/python3.11/site-packages/_cffi_backend.cpython-311-x86_64-linux-gnu.so
Executable file
Binary file not shown.
222
venv/lib/python3.11/site-packages/_distutils_hack/__init__.py
Normal file
222
venv/lib/python3.11/site-packages/_distutils_hack/__init__.py
Normal file
@@ -0,0 +1,222 @@
|
||||
# don't import any costly modules
|
||||
import sys
|
||||
import os
|
||||
|
||||
|
||||
is_pypy = '__pypy__' in sys.builtin_module_names
|
||||
|
||||
|
||||
def warn_distutils_present():
|
||||
if 'distutils' not in sys.modules:
|
||||
return
|
||||
if is_pypy and sys.version_info < (3, 7):
|
||||
# PyPy for 3.6 unconditionally imports distutils, so bypass the warning
|
||||
# https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
|
||||
return
|
||||
import warnings
|
||||
|
||||
warnings.warn(
|
||||
"Distutils was imported before Setuptools, but importing Setuptools "
|
||||
"also replaces the `distutils` module in `sys.modules`. This may lead "
|
||||
"to undesirable behaviors or errors. To avoid these issues, avoid "
|
||||
"using distutils directly, ensure that setuptools is installed in the "
|
||||
"traditional way (e.g. not an editable install), and/or make sure "
|
||||
"that setuptools is always imported before distutils."
|
||||
)
|
||||
|
||||
|
||||
def clear_distutils():
|
||||
if 'distutils' not in sys.modules:
|
||||
return
|
||||
import warnings
|
||||
|
||||
warnings.warn("Setuptools is replacing distutils.")
|
||||
mods = [
|
||||
name
|
||||
for name in sys.modules
|
||||
if name == "distutils" or name.startswith("distutils.")
|
||||
]
|
||||
for name in mods:
|
||||
del sys.modules[name]
|
||||
|
||||
|
||||
def enabled():
|
||||
"""
|
||||
Allow selection of distutils by environment variable.
|
||||
"""
|
||||
which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
|
||||
return which == 'local'
|
||||
|
||||
|
||||
def ensure_local_distutils():
|
||||
import importlib
|
||||
|
||||
clear_distutils()
|
||||
|
||||
# With the DistutilsMetaFinder in place,
|
||||
# perform an import to cause distutils to be
|
||||
# loaded from setuptools._distutils. Ref #2906.
|
||||
with shim():
|
||||
importlib.import_module('distutils')
|
||||
|
||||
# check that submodules load as expected
|
||||
core = importlib.import_module('distutils.core')
|
||||
assert '_distutils' in core.__file__, core.__file__
|
||||
assert 'setuptools._distutils.log' not in sys.modules
|
||||
|
||||
|
||||
def do_override():
|
||||
"""
|
||||
Ensure that the local copy of distutils is preferred over stdlib.
|
||||
|
||||
See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
|
||||
for more motivation.
|
||||
"""
|
||||
if enabled():
|
||||
warn_distutils_present()
|
||||
ensure_local_distutils()
|
||||
|
||||
|
||||
class _TrivialRe:
    """Minimal re.Pattern stand-in whose match() is substring containment.

    A string "matches" when every stored pattern occurs somewhere in it.
    """

    def __init__(self, *patterns):
        self._patterns = patterns

    def match(self, string):
        # True only if each pattern is a substring of `string`
        # (vacuously True when no patterns were given).
        for pattern in self._patterns:
            if pattern not in string:
                return False
        return True
|
||||
|
||||
|
||||
class DistutilsMetaFinder:
    """
    sys.meta_path finder that serves 'distutils' from the copy bundled
    with setuptools (setuptools._distutils) instead of the stdlib, while
    opting back into stdlib behavior for pip and for CPython's own
    sensitive test modules.
    """

    def find_spec(self, fullname, path, target=None):
        # optimization: only consider top level modules and those
        # found in the CPython test suite.
        if path is not None and not fullname.startswith('test.'):
            return

        # Dispatch on the import name: spec_for_distutils, spec_for_pip,
        # or a spec_for_test.* handler registered below. Unknown names hit
        # the no-op lambda, declining the import.
        method_name = 'spec_for_{fullname}'.format(**locals())
        method = getattr(self, method_name, lambda: None)
        return method()

    def spec_for_distutils(self):
        # In a CPython build/test tree, never shadow the stdlib distutils.
        if self.is_cpython():
            return

        import importlib
        import importlib.abc
        import importlib.util

        try:
            mod = importlib.import_module('setuptools._distutils')
        except Exception:
            # There are a couple of cases where setuptools._distutils
            # may not be present:
            # - An older Setuptools without a local distutils is
            #   taking precedence. Ref #2957.
            # - Path manipulation during sitecustomize removes
            #   setuptools from the path but only after the hook
            #   has been loaded. Ref #2980.
            # In either case, fall back to stdlib behavior.
            return

        class DistutilsLoader(importlib.abc.Loader):
            def create_module(self, spec):
                # Re-brand the already-imported package so it answers
                # to the name 'distutils'.
                mod.__name__ = 'distutils'
                return mod

            def exec_module(self, module):
                # Nothing to execute: create_module returned a module
                # that is already fully initialized.
                pass

        return importlib.util.spec_from_loader(
            'distutils', DistutilsLoader(), origin=mod.__file__
        )

    @staticmethod
    def is_cpython():
        """
        Suppress supplying distutils for CPython (build and tests).

        Ref #2965 and #3007.
        """
        # pybuilddir.txt is generated by CPython's own build system.
        return os.path.isfile('pybuilddir.txt')

    def spec_for_pip(self):
        """
        Ensure stdlib distutils when running under pip.
        See pypa/pip#8761 for rationale.
        """
        if self.pip_imported_during_build():
            return
        clear_distutils()
        # Shadow the class-level method with an instance attribute so every
        # later 'distutils' import through this finder falls back to stdlib.
        self.spec_for_distutils = lambda: None

    @classmethod
    def pip_imported_during_build(cls):
        """
        Detect if pip is being imported in a build script. Ref #2355.
        """
        import traceback

        # Walk the current call stack looking for a setup.py frame.
        return any(
            cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None)
        )

    @staticmethod
    def frame_file_is_setup(frame):
        """
        Return True if the indicated frame suggests a setup.py file.
        """
        # some frames may not have __file__ (#2940)
        return frame.f_globals.get('__file__', '').endswith('setup.py')

    def spec_for_sensitive_tests(self):
        """
        Ensure stdlib distutils when running select tests under CPython.

        python/cpython#91169
        """
        clear_distutils()
        # Same instance-attribute shadowing trick as spec_for_pip.
        self.spec_for_distutils = lambda: None

    # CPython test modules that must see the stdlib distutils.
    # 3.10+ dropped test.test_peg_generator/test_importlib from the list.
    sensitive_tests = (
        [
            'test.test_distutils',
            'test.test_peg_generator',
            'test.test_importlib',
        ]
        if sys.version_info < (3, 10)
        else [
            'test.test_distutils',
        ]
    )
|
||||
|
||||
|
||||
# Install a spec_for_<module> handler for each sensitive CPython test
# module, so find_spec dispatches those imports to stdlib distutils.
for name in DistutilsMetaFinder.sensitive_tests:
    setattr(
        DistutilsMetaFinder,
        f'spec_for_{name}',
        DistutilsMetaFinder.spec_for_sensitive_tests,
    )
|
||||
|
||||
|
||||
# Singleton finder shared by add_shim/insert_shim/remove_shim.
DISTUTILS_FINDER = DistutilsMetaFinder()
|
||||
|
||||
|
||||
def add_shim():
    """Install DISTUTILS_FINDER on sys.meta_path unless already present."""
    if DISTUTILS_FINDER not in sys.meta_path:
        insert_shim()
|
||||
|
||||
|
||||
class shim:
    # Context manager: installs DISTUTILS_FINDER on entry, removes it on
    # exit, so the shim is only active for the duration of the block.
    def __enter__(self):
        insert_shim()

    def __exit__(self, exc, value, tb):
        remove_shim()
|
||||
|
||||
|
||||
def insert_shim():
    # Prepend so the finder takes precedence over the default finders.
    sys.meta_path.insert(0, DISTUTILS_FINDER)
|
||||
|
||||
|
||||
def remove_shim():
    """Detach DISTUTILS_FINDER from sys.meta_path; a no-op when absent."""
    if DISTUTILS_FINDER in sys.meta_path:
        sys.meta_path.remove(DISTUTILS_FINDER)
|
||||
Binary file not shown.
Binary file not shown.
@@ -0,0 +1 @@
|
||||
__import__('_distutils_hack').do_override()
|
||||
@@ -0,0 +1 @@
|
||||
pip
|
||||
@@ -0,0 +1,295 @@
|
||||
Metadata-Version: 2.3
|
||||
Name: annotated-types
|
||||
Version: 0.7.0
|
||||
Summary: Reusable constraint types to use with typing.Annotated
|
||||
Project-URL: Homepage, https://github.com/annotated-types/annotated-types
|
||||
Project-URL: Source, https://github.com/annotated-types/annotated-types
|
||||
Project-URL: Changelog, https://github.com/annotated-types/annotated-types/releases
|
||||
Author-email: Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>, Samuel Colvin <s@muelcolvin.com>, Zac Hatfield-Dodds <zac@zhd.dev>
|
||||
License-File: LICENSE
|
||||
Classifier: Development Status :: 4 - Beta
|
||||
Classifier: Environment :: Console
|
||||
Classifier: Environment :: MacOS X
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: Intended Audience :: Information Technology
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: Operating System :: POSIX :: Linux
|
||||
Classifier: Operating System :: Unix
|
||||
Classifier: Programming Language :: Python :: 3 :: Only
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: 3.12
|
||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||
Classifier: Typing :: Typed
|
||||
Requires-Python: >=3.8
|
||||
Requires-Dist: typing-extensions>=4.0.0; python_version < '3.9'
|
||||
Description-Content-Type: text/markdown
|
||||
|
||||
# annotated-types
|
||||
|
||||
[](https://github.com/annotated-types/annotated-types/actions?query=event%3Apush+branch%3Amain+workflow%3ACI)
|
||||
[](https://pypi.python.org/pypi/annotated-types)
|
||||
[](https://github.com/annotated-types/annotated-types)
|
||||
[](https://github.com/annotated-types/annotated-types/blob/main/LICENSE)
|
||||
|
||||
[PEP-593](https://peps.python.org/pep-0593/) added `typing.Annotated` as a way of
|
||||
adding context-specific metadata to existing types, and specifies that
|
||||
`Annotated[T, x]` _should_ be treated as `T` by any tool or library without special
|
||||
logic for `x`.
|
||||
|
||||
This package provides metadata objects which can be used to represent common
|
||||
constraints such as upper and lower bounds on scalar values and collection sizes,
|
||||
a `Predicate` marker for runtime checks, and
|
||||
descriptions of how we intend these metadata to be interpreted. In some cases,
|
||||
we also note alternative representations which do not require this package.
|
||||
|
||||
## Install
|
||||
|
||||
```bash
|
||||
pip install annotated-types
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
```python
|
||||
from typing import Annotated
|
||||
from annotated_types import Gt, Len, Predicate
|
||||
|
||||
class MyClass:
|
||||
age: Annotated[int, Gt(18)] # Valid: 19, 20, ...
|
||||
# Invalid: 17, 18, "19", 19.0, ...
|
||||
factors: list[Annotated[int, Predicate(is_prime)]] # Valid: 2, 3, 5, 7, 11, ...
|
||||
# Invalid: 4, 8, -2, 5.0, "prime", ...
|
||||
|
||||
my_list: Annotated[list[int], Len(0, 10)] # Valid: [], [10, 20, 30, 40, 50]
|
||||
# Invalid: (1, 2), ["abc"], [0] * 20
|
||||
```
|
||||
|
||||
## Documentation
|
||||
|
||||
_While `annotated-types` avoids runtime checks for performance, users should not
|
||||
construct invalid combinations such as `MultipleOf("non-numeric")` or `Annotated[int, Len(3)]`.
|
||||
Downstream implementors may choose to raise an error, emit a warning, silently ignore
|
||||
a metadata item, etc., if the metadata objects described below are used with an
|
||||
incompatible type - or for any other reason!_
|
||||
|
||||
### Gt, Ge, Lt, Le
|
||||
|
||||
Express inclusive and/or exclusive bounds on orderable values - which may be numbers,
|
||||
dates, times, strings, sets, etc. Note that the boundary value need not be of the
|
||||
same type that was annotated, so long as they can be compared: `Annotated[int, Gt(1.5)]`
|
||||
is fine, for example, and implies that the value is an integer x such that `x > 1.5`.
|
||||
|
||||
We suggest that implementors may also interpret `functools.partial(operator.lt, 1.5)`
as being equivalent to `Gt(1.5)`, for users who wish to avoid a runtime dependency on
the `annotated-types` package.
|
||||
|
||||
To be explicit, these types have the following meanings:
|
||||
|
||||
* `Gt(x)` - value must be "Greater Than" `x` - equivalent to exclusive minimum
|
||||
* `Ge(x)` - value must be "Greater than or Equal" to `x` - equivalent to inclusive minimum
|
||||
* `Lt(x)` - value must be "Less Than" `x` - equivalent to exclusive maximum
|
||||
* `Le(x)` - value must be "Less than or Equal" to `x` - equivalent to inclusive maximum
|
||||
|
||||
### Interval
|
||||
|
||||
`Interval(gt, ge, lt, le)` allows you to specify an upper and lower bound with a single
|
||||
metadata object. `None` attributes should be ignored, and non-`None` attributes
|
||||
treated as per the single bounds above.
|
||||
|
||||
### MultipleOf
|
||||
|
||||
`MultipleOf(multiple_of=x)` might be interpreted in two ways:
|
||||
|
||||
1. Python semantics, implying `value % multiple_of == 0`, or
|
||||
2. [JSONschema semantics](https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.2.1),
|
||||
where `int(value / multiple_of) == value / multiple_of`.
|
||||
|
||||
We encourage users to be aware of these two common interpretations and their
|
||||
distinct behaviours, especially since very large or non-integer numbers make
|
||||
it easy to cause silent data corruption due to floating-point imprecision.
|
||||
|
||||
We encourage libraries to carefully document which interpretation they implement.
|
||||
|
||||
### MinLen, MaxLen, Len
|
||||
|
||||
`Len()` implies that `min_length <= len(value) <= max_length` - lower and upper bounds are inclusive.
|
||||
|
||||
As well as `Len()` which can optionally include upper and lower bounds, we also
|
||||
provide `MinLen(x)` and `MaxLen(y)` which are equivalent to `Len(min_length=x)`
|
||||
and `Len(max_length=y)` respectively.
|
||||
|
||||
`Len`, `MinLen`, and `MaxLen` may be used with any type which supports `len(value)`.
|
||||
|
||||
Examples of usage:
|
||||
|
||||
* `Annotated[list, MaxLen(10)]` (or `Annotated[list, Len(max_length=10)]`) - list must have a length of 10 or less
|
||||
* `Annotated[str, MaxLen(10)]` - string must have a length of 10 or less
|
||||
* `Annotated[list, MinLen(3)]` (or `Annotated[list, Len(min_length=3)]`) - list must have a length of 3 or more
|
||||
* `Annotated[list, Len(4, 6)]` - list must have a length of 4, 5, or 6
|
||||
* `Annotated[list, Len(8, 8)]` - list must have a length of exactly 8
|
||||
|
||||
#### Changed in v0.4.0
|
||||
|
||||
* `min_inclusive` has been renamed to `min_length`, no change in meaning
|
||||
* `max_exclusive` has been renamed to `max_length`, upper bound is now **inclusive** instead of **exclusive**
|
||||
* The recommendation that slices are interpreted as `Len` has been removed due to ambiguity and different semantic
|
||||
meaning of the upper bound in slices vs. `Len`
|
||||
|
||||
See [issue #23](https://github.com/annotated-types/annotated-types/issues/23) for discussion.
|
||||
|
||||
### Timezone
|
||||
|
||||
`Timezone` can be used with a `datetime` or a `time` to express which timezones
|
||||
are allowed. `Annotated[datetime, Timezone(None)]` must be a naive datetime.
|
||||
`Timezone[...]` ([literal ellipsis](https://docs.python.org/3/library/constants.html#Ellipsis))
|
||||
expresses that any timezone-aware datetime is allowed. You may also pass a specific
|
||||
timezone string or [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects)
|
||||
object such as `Timezone(timezone.utc)` or `Timezone("Africa/Abidjan")` to express that you only
|
||||
allow a specific timezone, though we note that this is often a symptom of fragile design.
|
||||
|
||||
#### Changed in v0.6.0
|
||||
|
||||
* `Timezone` accepts [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects) objects instead of
|
||||
`timezone`, extending compatibility to [`zoneinfo`](https://docs.python.org/3/library/zoneinfo.html) and third party libraries.
|
||||
|
||||
### Unit
|
||||
|
||||
`Unit(unit: str)` expresses that the annotated numeric value is the magnitude of
|
||||
a quantity with the specified unit. For example, `Annotated[float, Unit("m/s")]`
|
||||
would be a float representing a velocity in meters per second.
|
||||
|
||||
Please note that `annotated_types` itself makes no attempt to parse or validate
|
||||
the unit string in any way. That is left entirely to downstream libraries,
|
||||
such as [`pint`](https://pint.readthedocs.io) or
|
||||
[`astropy.units`](https://docs.astropy.org/en/stable/units/).
|
||||
|
||||
An example of how a library might use this metadata:
|
||||
|
||||
```python
|
||||
from annotated_types import Unit
|
||||
from typing import Annotated, TypeVar, Callable, Any, get_origin, get_args
|
||||
|
||||
# given a type annotated with a unit:
|
||||
Meters = Annotated[float, Unit("m")]
|
||||
|
||||
|
||||
# you can cast the annotation to a specific unit type with any
|
||||
# callable that accepts a string and returns the desired type
|
||||
T = TypeVar("T")
|
||||
def cast_unit(tp: Any, unit_cls: Callable[[str], T]) -> T | None:
|
||||
if get_origin(tp) is Annotated:
|
||||
for arg in get_args(tp):
|
||||
if isinstance(arg, Unit):
|
||||
return unit_cls(arg.unit)
|
||||
return None
|
||||
|
||||
|
||||
# using `pint`
|
||||
import pint
|
||||
pint_unit = cast_unit(Meters, pint.Unit)
|
||||
|
||||
|
||||
# using `astropy.units`
|
||||
import astropy.units as u
|
||||
astropy_unit = cast_unit(Meters, u.Unit)
|
||||
```
|
||||
|
||||
### Predicate
|
||||
|
||||
`Predicate(func: Callable)` expresses that `func(value)` is truthy for valid values.
|
||||
Users should prefer the statically inspectable metadata above, but if you need
|
||||
the full power and flexibility of arbitrary runtime predicates... here it is.
|
||||
|
||||
For some common constraints, we provide generic types:
|
||||
|
||||
* `IsLower = Annotated[T, Predicate(str.islower)]`
|
||||
* `IsUpper = Annotated[T, Predicate(str.isupper)]`
|
||||
* `IsDigit = Annotated[T, Predicate(str.isdigit)]`
|
||||
* `IsFinite = Annotated[T, Predicate(math.isfinite)]`
|
||||
* `IsNotFinite = Annotated[T, Predicate(Not(math.isfinite))]`
|
||||
* `IsNan = Annotated[T, Predicate(math.isnan)]`
|
||||
* `IsNotNan = Annotated[T, Predicate(Not(math.isnan))]`
|
||||
* `IsInfinite = Annotated[T, Predicate(math.isinf)]`
|
||||
* `IsNotInfinite = Annotated[T, Predicate(Not(math.isinf))]`
|
||||
|
||||
so that you can write e.g. `x: IsFinite[float] = 2.0` instead of the longer
|
||||
(but exactly equivalent) `x: Annotated[float, Predicate(math.isfinite)] = 2.0`.
|
||||
|
||||
Some libraries might have special logic to handle known or understandable predicates,
|
||||
for example by checking for `str.isdigit` and using its presence to both call custom
|
||||
logic to enforce digit-only strings, and customise some generated external schema.
|
||||
Users are therefore encouraged to avoid indirection like `lambda s: s.lower()`, in
|
||||
favor of introspectable methods such as `str.lower` or `re.compile("pattern").search`.
|
||||
|
||||
To enable basic negation of commonly used predicates like `math.isnan` without introducing introspection that makes it impossible for implementers to introspect the predicate we provide a `Not` wrapper that simply negates the predicate in an introspectable manner. Several of the predicates listed above are created in this manner.
|
||||
|
||||
We do not specify what behaviour should be expected for predicates that raise
|
||||
an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
|
||||
skip invalid constraints, or statically raise an error; or it might try calling it
|
||||
and then propagate or discard the resulting
|
||||
`TypeError: descriptor 'isdigit' for 'str' objects doesn't apply to a 'int' object`
|
||||
exception. We encourage libraries to document the behaviour they choose.
|
||||
|
||||
### Doc
|
||||
|
||||
`doc()` can be used to add documentation information in `Annotated`, for function and method parameters, variables, class attributes, return types, and any place where `Annotated` can be used.
|
||||
|
||||
It expects a value that can be statically analyzed, as the main use case is for static analysis, editors, documentation generators, and similar tools.
|
||||
|
||||
It returns a `DocInfo` class with a single attribute `documentation` containing the value passed to `doc()`.
|
||||
|
||||
This is the early adopter's alternative form of the [`typing-doc` proposal](https://github.com/tiangolo/fastapi/blob/typing-doc/typing_doc.md).
|
||||
|
||||
### Integrating downstream types with `GroupedMetadata`
|
||||
|
||||
Implementers may choose to provide a convenience wrapper that groups multiple pieces of metadata.
|
||||
This can help reduce verbosity and cognitive overhead for users.
|
||||
For example, an implementer like Pydantic might provide a `Field` or `Meta` type that accepts keyword arguments and transforms these into low-level metadata:
|
||||
|
||||
```python
|
||||
from dataclasses import dataclass
|
||||
from typing import Iterator
|
||||
from annotated_types import GroupedMetadata, Ge
|
||||
|
||||
@dataclass
|
||||
class Field(GroupedMetadata):
|
||||
ge: int | None = None
|
||||
description: str | None = None
|
||||
|
||||
def __iter__(self) -> Iterator[object]:
|
||||
# Iterating over a GroupedMetadata object should yield annotated-types
|
||||
# constraint metadata objects which describe it as fully as possible,
|
||||
# and may include other unknown objects too.
|
||||
if self.ge is not None:
|
||||
yield Ge(self.ge)
|
||||
if self.description is not None:
|
||||
yield Description(self.description)
|
||||
```
|
||||
|
||||
Libraries consuming annotated-types constraints should check for `GroupedMetadata` and unpack it by iterating over the object and treating the results as if they had been "unpacked" in the `Annotated` type. The same logic should be applied to the [PEP 646 `Unpack` type](https://peps.python.org/pep-0646/), so that `Annotated[T, Field(...)]`, `Annotated[T, Unpack[Field(...)]]` and `Annotated[T, *Field(...)]` are all treated consistently.
|
||||
|
||||
Libraries consuming annotated-types should also ignore any metadata they do not recognize that came from unpacking a `GroupedMetadata`, just like they ignore unrecognized metadata in `Annotated` itself.
|
||||
|
||||
Our own `annotated_types.Interval` class is a `GroupedMetadata` which unpacks itself into `Gt`, `Lt`, etc., so this is not an abstract concern. Similarly, `annotated_types.Len` is a `GroupedMetadata` which unpacks itself into `MinLen` (optionally) and `MaxLen`.
|
||||
|
||||
### Consuming metadata
|
||||
|
||||
We intend to not be prescriptive as to _how_ the metadata and constraints are used, but as an example of how one might parse constraints from types annotations see our [implementation in `test_main.py`](https://github.com/annotated-types/annotated-types/blob/f59cf6d1b5255a0fe359b93896759a180bec30ae/tests/test_main.py#L94-L103).
|
||||
|
||||
It is up to the implementer to determine how this metadata is used.
|
||||
You could use the metadata for runtime type checking, for generating schemas or to generate example data, amongst other use cases.
|
||||
|
||||
## Design & History
|
||||
|
||||
This package was designed at the PyCon 2022 sprints by the maintainers of Pydantic
|
||||
and Hypothesis, with the goal of making it as easy as possible for end-users to
|
||||
provide more informative annotations for use by runtime libraries.
|
||||
|
||||
It is deliberately minimal, and following PEP-593 allows considerable downstream
|
||||
discretion in what (if anything!) they choose to support. Nonetheless, we expect
|
||||
that staying simple and covering _only_ the most common use-cases will give users
|
||||
and maintainers the best experience we can. If you'd like more constraints for your
|
||||
types - follow our lead, by defining them and documenting them downstream!
|
||||
@@ -0,0 +1,10 @@
|
||||
annotated_types-0.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
annotated_types-0.7.0.dist-info/METADATA,sha256=7ltqxksJJ0wCYFGBNIQCWTlWQGeAH0hRFdnK3CB895E,15046
|
||||
annotated_types-0.7.0.dist-info/RECORD,,
|
||||
annotated_types-0.7.0.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87
|
||||
annotated_types-0.7.0.dist-info/licenses/LICENSE,sha256=_hBJiEsaDZNCkB6I4H8ykl0ksxIdmXK2poBfuYJLCV0,1083
|
||||
annotated_types/__init__.py,sha256=RynLsRKUEGI0KimXydlD1fZEfEzWwDo0Uon3zOKhG1Q,13819
|
||||
annotated_types/__pycache__/__init__.cpython-311.pyc,,
|
||||
annotated_types/__pycache__/test_cases.cpython-311.pyc,,
|
||||
annotated_types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
annotated_types/test_cases.py,sha256=zHFX6EpcMbGJ8FzBYDbO56bPwx_DYIVSKbZM-4B3_lg,6421
|
||||
@@ -0,0 +1,4 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: hatchling 1.24.2
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2022 the contributors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
432
venv/lib/python3.11/site-packages/annotated_types/__init__.py
Normal file
432
venv/lib/python3.11/site-packages/annotated_types/__init__.py
Normal file
@@ -0,0 +1,432 @@
|
||||
import math
|
||||
import sys
|
||||
import types
|
||||
from dataclasses import dataclass
|
||||
from datetime import tzinfo
|
||||
from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, SupportsFloat, SupportsIndex, TypeVar, Union
|
||||
|
||||
if sys.version_info < (3, 8):
|
||||
from typing_extensions import Protocol, runtime_checkable
|
||||
else:
|
||||
from typing import Protocol, runtime_checkable
|
||||
|
||||
if sys.version_info < (3, 9):
|
||||
from typing_extensions import Annotated, Literal
|
||||
else:
|
||||
from typing import Annotated, Literal
|
||||
|
||||
if sys.version_info < (3, 10):
|
||||
EllipsisType = type(Ellipsis)
|
||||
KW_ONLY = {}
|
||||
SLOTS = {}
|
||||
else:
|
||||
from types import EllipsisType
|
||||
|
||||
KW_ONLY = {"kw_only": True}
|
||||
SLOTS = {"slots": True}
|
||||
|
||||
|
||||
# Public names exported by ``from annotated_types import *``.
# Fix: ``Unit`` is defined in this module but was missing from __all__,
# so wildcard imports silently skipped it; it is now exported.
# NOTE(review): the README also documents a ``Not`` wrapper — if it is
# defined in this module it likely belongs here too; confirm.
__all__ = (
    'BaseMetadata',
    'GroupedMetadata',
    'Gt',
    'Ge',
    'Lt',
    'Le',
    'Interval',
    'MultipleOf',
    'MinLen',
    'MaxLen',
    'Len',
    'Timezone',
    'Unit',
    'Predicate',
    'LowerCase',
    'UpperCase',
    'IsDigits',
    'IsFinite',
    'IsNotFinite',
    'IsNan',
    'IsNotNan',
    'IsInfinite',
    'IsNotInfinite',
    'doc',
    'DocInfo',
    '__version__',
)

__version__ = '0.7.0'


# Generic placeholder used by the Supports* protocols below.
T = TypeVar('T')
|
||||
|
||||
|
||||
# arguments that start with __ are considered
|
||||
# positional only
|
||||
# see https://peps.python.org/pep-0484/#positional-only-arguments
|
||||
|
||||
|
||||
class SupportsGt(Protocol):
    """Structural type for values comparable with ``>`` (used by Gt)."""

    def __gt__(self: T, __other: T) -> bool:
        ...
|
||||
|
||||
|
||||
class SupportsGe(Protocol):
    """Structural type for values comparable with ``>=`` (used by Ge)."""

    def __ge__(self: T, __other: T) -> bool:
        ...
|
||||
|
||||
|
||||
class SupportsLt(Protocol):
    """Structural type for values comparable with ``<`` (used by Lt)."""

    def __lt__(self: T, __other: T) -> bool:
        ...
|
||||
|
||||
|
||||
class SupportsLe(Protocol):
    """Structural type for values comparable with ``<=`` (used by Le)."""

    def __le__(self: T, __other: T) -> bool:
        ...
|
||||
|
||||
|
||||
class SupportsMod(Protocol):
    """Structural type for values supporting ``%`` (used by MultipleOf)."""

    def __mod__(self: T, __other: T) -> T:
        ...
|
||||
|
||||
|
||||
class SupportsDiv(Protocol):
    """Structural type for values supporting division (used by MultipleOf).

    NOTE(review): ``__div__`` is the Python 2 operator name; Python 3 true
    division uses ``__truediv__``, so this protocol does not structurally
    match modern numeric types. Kept as-is for interface compatibility —
    confirm against upstream before changing.
    """

    def __div__(self: T, __other: T) -> T:
        ...
|
||||
|
||||
|
||||
class BaseMetadata:
    """Base class for all metadata.

    This exists mainly so that implementers
    can do `isinstance(..., BaseMetadata)` while traversing field annotations.
    """

    # No instance state; empty __slots__ lets slotted dataclass subclasses
    # avoid inheriting a per-instance __dict__.
    __slots__ = ()
|
||||
|
||||
|
||||
@dataclass(frozen=True, **SLOTS)
class Gt(BaseMetadata):
    """Gt(gt=x) implies that the value must be greater than x.

    It can be used with any type that supports the ``>`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    gt: SupportsGt  # exclusive lower bound
|
||||
|
||||
|
||||
@dataclass(frozen=True, **SLOTS)
class Ge(BaseMetadata):
    """Ge(ge=x) implies that the value must be greater than or equal to x.

    It can be used with any type that supports the ``>=`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    ge: SupportsGe  # inclusive lower bound
|
||||
|
||||
|
||||
@dataclass(frozen=True, **SLOTS)
class Lt(BaseMetadata):
    """Lt(lt=x) implies that the value must be less than x.

    It can be used with any type that supports the ``<`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    lt: SupportsLt  # exclusive upper bound
|
||||
|
||||
|
||||
@dataclass(frozen=True, **SLOTS)
class Le(BaseMetadata):
    """Le(le=x) implies that the value must be less than or equal to x.

    It can be used with any type that supports the ``<=`` operator,
    including numbers, dates and times, strings, sets, and so on.
    """

    le: SupportsLe  # inclusive upper bound
|
||||
|
||||
|
||||
@runtime_checkable
class GroupedMetadata(Protocol):
    """A grouping of multiple objects, like typing.Unpack.

    `GroupedMetadata` on its own is not metadata and has no meaning.
    All of the constraints and metadata should be fully expressable
    in terms of the `BaseMetadata`'s returned by `GroupedMetadata.__iter__()`.

    Concrete implementations should override `GroupedMetadata.__iter__()`
    to add their own metadata.
    For example:

    >>> @dataclass
    >>> class Field(GroupedMetadata):
    >>>     gt: float | None = None
    >>>     description: str | None = None
    ...
    >>>     def __iter__(self) -> Iterable[object]:
    >>>         if self.gt is not None:
    >>>             yield Gt(self.gt)
    >>>         if self.description is not None:
    >>>             yield Description(self.description)

    Also see the implementation of `Interval` below for an example.

    Parsers should recognize this and unpack it so that it can be used
    both with and without unpacking:

    - `Annotated[int, Field(...)]` (parser must unpack Field)
    - `Annotated[int, *Field(...)]` (PEP-646)
    """  # noqa: trailing-whitespace

    @property
    def __is_annotated_types_grouped_metadata__(self) -> Literal[True]:
        # Marker attribute so consumers can detect grouped metadata without
        # relying on a (slower) runtime isinstance check of this Protocol.
        return True

    def __iter__(self) -> Iterator[object]:
        ...

    if not TYPE_CHECKING:
        __slots__ = ()  # allow subclasses to use slots

        def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
            # Basic ABC like functionality without the complexity of an ABC
            super().__init_subclass__(*args, **kwargs)
            if cls.__iter__ is GroupedMetadata.__iter__:
                raise TypeError("Can't subclass GroupedMetadata without implementing __iter__")

        def __iter__(self) -> Iterator[object]:  # noqa: F811
            raise NotImplementedError  # more helpful than "None has no attribute..." type errors
|
||||
|
||||
|
||||
@dataclass(frozen=True, **KW_ONLY, **SLOTS)
class Interval(GroupedMetadata):
    """Interval can express inclusive or exclusive bounds with a single object.

    It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which
    are interpreted the same way as the single-bound constraints.
    """

    gt: Union[SupportsGt, None] = None
    ge: Union[SupportsGe, None] = None
    lt: Union[SupportsLt, None] = None
    le: Union[SupportsLe, None] = None

    def __iter__(self) -> Iterator[BaseMetadata]:
        """Unpack an Interval into zero or more single-bounds."""
        # Emit one single-bound constraint per populated attribute, in
        # field declaration order (gt, ge, lt, le).
        bounds = ((self.gt, Gt), (self.ge, Ge), (self.lt, Lt), (self.le, Le))
        for value, constraint in bounds:
            if value is not None:
                yield constraint(value)
|
||||
|
||||
|
||||
@dataclass(frozen=True, **SLOTS)
class MultipleOf(BaseMetadata):
    """MultipleOf(multiple_of=x) might be interpreted in two ways:

    1. Python semantics, implying ``value % multiple_of == 0``, or
    2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of``

    We encourage users to be aware of these two common interpretations,
    and libraries to carefully document which they implement.
    """

    multiple_of: Union[SupportsDiv, SupportsMod]  # the divisor; mod-vs-div semantics are the consumer's choice
|
||||
|
||||
|
||||
@dataclass(frozen=True, **SLOTS)
class MinLen(BaseMetadata):
    """
    MinLen() implies minimum inclusive length,
    e.g. ``len(value) >= min_length``.
    """

    min_length: Annotated[int, Ge(0)]  # inclusive lower bound on len(value)
|
||||
|
||||
|
||||
@dataclass(frozen=True, **SLOTS)
class MaxLen(BaseMetadata):
    """
    MaxLen() implies maximum inclusive length,
    e.g. ``len(value) <= max_length``.
    """

    max_length: Annotated[int, Ge(0)]  # inclusive upper bound on len(value)
|
||||
|
||||
|
||||
@dataclass(frozen=True, **SLOTS)
class Len(GroupedMetadata):
    """
    Len() implies that ``min_length <= len(value) <= max_length``.

    Upper bound may be omitted or ``None`` to indicate no upper length bound.
    """

    min_length: Annotated[int, Ge(0)] = 0  # inclusive lower bound
    max_length: Optional[Annotated[int, Ge(0)]] = None  # inclusive upper bound; None = unbounded

    def __iter__(self) -> Iterator[BaseMetadata]:
        """Unpack a Len into zero or more single-bounds."""
        # MinLen(0) is vacuous, so it is only emitted for positive minimums.
        if self.min_length > 0:
            yield MinLen(self.min_length)
        if self.max_length is not None:
            yield MaxLen(self.max_length)
|
||||
|
||||
|
||||
@dataclass(frozen=True, **SLOTS)
class Timezone(BaseMetadata):
    """Timezone(tz=...) requires a datetime to be aware (or ``tz=None``, naive).

    ``Annotated[datetime, Timezone(None)]`` must be a naive datetime.
    ``Timezone[...]`` (the ellipsis literal) expresses that the datetime must be
    tz-aware but any timezone is allowed.

    You may also pass a specific timezone string or tzinfo object such as
    ``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that
    you only allow a specific timezone, though we note that this is often
    a symptom of poor design.
    """

    # None -> naive required; ... -> any aware tz; str/tzinfo -> that exact tz.
    tz: Union[str, tzinfo, EllipsisType, None]
|
||||
|
||||
|
||||
@dataclass(frozen=True, **SLOTS)
|
||||
class Unit(BaseMetadata):
|
||||
"""Indicates that the value is a physical quantity with the specified unit.
|
||||
|
||||
It is intended for usage with numeric types, where the value represents the
|
||||
magnitude of the quantity. For example, ``distance: Annotated[float, Unit('m')]``
|
||||
or ``speed: Annotated[float, Unit('m/s')]``.
|
||||
|
||||
Interpretation of the unit string is left to the discretion of the consumer.
|
||||
It is suggested to follow conventions established by python libraries that work
|
||||
with physical quantities, such as
|
||||
|
||||
- ``pint`` : <https://pint.readthedocs.io/en/stable/>
|
||||
- ``astropy.units``: <https://docs.astropy.org/en/stable/units/>
|
||||
|
||||
For indicating a quantity with a certain dimensionality but without a specific unit
|
||||
it is recommended to use square brackets, e.g. `Annotated[float, Unit('[time]')]`.
|
||||
Note, however, ``annotated_types`` itself makes no use of the unit string.
|
||||
"""
|
||||
|
||||
unit: str
|
||||
|
||||
|
||||
@dataclass(frozen=True, **SLOTS)
|
||||
class Predicate(BaseMetadata):
|
||||
"""``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values.
|
||||
|
||||
Users should prefer statically inspectable metadata, but if you need the full
|
||||
power and flexibility of arbitrary runtime predicates... here it is.
|
||||
|
||||
We provide a few predefined predicates for common string constraints:
|
||||
``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and
|
||||
``IsDigits = Predicate(str.isdigit)``. Users are encouraged to use methods which
|
||||
can be given special handling, and avoid indirection like ``lambda s: s.lower()``.
|
||||
|
||||
Some libraries might have special logic to handle certain predicates, e.g. by
|
||||
checking for `str.isdigit` and using its presence to both call custom logic to
|
||||
enforce digit-only strings, and customise some generated external schema.
|
||||
|
||||
We do not specify what behaviour should be expected for predicates that raise
|
||||
an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
|
||||
skip invalid constraints, or statically raise an error; or it might try calling it
|
||||
and then propagate or discard the resulting exception.
|
||||
"""
|
||||
|
||||
func: Callable[[Any], bool]
|
||||
|
||||
def __repr__(self) -> str:
|
||||
if getattr(self.func, "__name__", "<lambda>") == "<lambda>":
|
||||
return f"{self.__class__.__name__}({self.func!r})"
|
||||
if isinstance(self.func, (types.MethodType, types.BuiltinMethodType)) and (
|
||||
namespace := getattr(self.func.__self__, "__name__", None)
|
||||
):
|
||||
return f"{self.__class__.__name__}({namespace}.{self.func.__name__})"
|
||||
if isinstance(self.func, type(str.isascii)): # method descriptor
|
||||
return f"{self.__class__.__name__}({self.func.__qualname__})"
|
||||
return f"{self.__class__.__name__}({self.func.__name__})"
|
||||
|
||||
|
||||
@dataclass
|
||||
class Not:
|
||||
func: Callable[[Any], bool]
|
||||
|
||||
def __call__(self, __v: Any) -> bool:
|
||||
return not self.func(__v)
|
||||
|
||||
|
||||
_StrType = TypeVar("_StrType", bound=str)
|
||||
|
||||
LowerCase = Annotated[_StrType, Predicate(str.islower)]
|
||||
"""
|
||||
Return True if the string is a lowercase string, False otherwise.
|
||||
|
||||
A string is lowercase if all cased characters in the string are lowercase and there is at least one cased character in the string.
|
||||
""" # noqa: E501
|
||||
UpperCase = Annotated[_StrType, Predicate(str.isupper)]
|
||||
"""
|
||||
Return True if the string is an uppercase string, False otherwise.
|
||||
|
||||
A string is uppercase if all cased characters in the string are uppercase and there is at least one cased character in the string.
|
||||
""" # noqa: E501
|
||||
IsDigit = Annotated[_StrType, Predicate(str.isdigit)]
|
||||
IsDigits = IsDigit # type: ignore # plural for backwards compatibility, see #63
|
||||
"""
|
||||
Return True if the string is a digit string, False otherwise.
|
||||
|
||||
A string is a digit string if all characters in the string are digits and there is at least one character in the string.
|
||||
""" # noqa: E501
|
||||
IsAscii = Annotated[_StrType, Predicate(str.isascii)]
|
||||
"""
|
||||
Return True if all characters in the string are ASCII, False otherwise.
|
||||
|
||||
ASCII characters have code points in the range U+0000-U+007F. Empty string is ASCII too.
|
||||
"""
|
||||
|
||||
_NumericType = TypeVar('_NumericType', bound=Union[SupportsFloat, SupportsIndex])
|
||||
IsFinite = Annotated[_NumericType, Predicate(math.isfinite)]
|
||||
"""Return True if x is neither an infinity nor a NaN, and False otherwise."""
|
||||
IsNotFinite = Annotated[_NumericType, Predicate(Not(math.isfinite))]
|
||||
"""Return True if x is one of infinity or NaN, and False otherwise"""
|
||||
IsNan = Annotated[_NumericType, Predicate(math.isnan)]
|
||||
"""Return True if x is a NaN (not a number), and False otherwise."""
|
||||
IsNotNan = Annotated[_NumericType, Predicate(Not(math.isnan))]
|
||||
"""Return True if x is anything but NaN (not a number), and False otherwise."""
|
||||
IsInfinite = Annotated[_NumericType, Predicate(math.isinf)]
|
||||
"""Return True if x is a positive or negative infinity, and False otherwise."""
|
||||
IsNotInfinite = Annotated[_NumericType, Predicate(Not(math.isinf))]
|
||||
"""Return True if x is neither a positive or negative infinity, and False otherwise."""
|
||||
|
||||
try:
|
||||
from typing_extensions import DocInfo, doc # type: ignore [attr-defined]
|
||||
except ImportError:
|
||||
|
||||
@dataclass(frozen=True, **SLOTS)
|
||||
class DocInfo: # type: ignore [no-redef]
|
||||
""" "
|
||||
The return value of doc(), mainly to be used by tools that want to extract the
|
||||
Annotated documentation at runtime.
|
||||
"""
|
||||
|
||||
documentation: str
|
||||
"""The documentation string passed to doc()."""
|
||||
|
||||
def doc(
|
||||
documentation: str,
|
||||
) -> DocInfo:
|
||||
"""
|
||||
Add documentation to a type annotation inside of Annotated.
|
||||
|
||||
For example:
|
||||
|
||||
>>> def hi(name: Annotated[int, doc("The name of the user")]) -> None: ...
|
||||
"""
|
||||
return DocInfo(documentation)
|
||||
Binary file not shown.
Binary file not shown.
151
venv/lib/python3.11/site-packages/annotated_types/test_cases.py
Normal file
151
venv/lib/python3.11/site-packages/annotated_types/test_cases.py
Normal file
@@ -0,0 +1,151 @@
|
||||
import math
|
||||
import sys
|
||||
from datetime import date, datetime, timedelta, timezone
|
||||
from decimal import Decimal
|
||||
from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple
|
||||
|
||||
if sys.version_info < (3, 9):
|
||||
from typing_extensions import Annotated
|
||||
else:
|
||||
from typing import Annotated
|
||||
|
||||
import annotated_types as at
|
||||
|
||||
|
||||
class Case(NamedTuple):
|
||||
"""
|
||||
A test case for `annotated_types`.
|
||||
"""
|
||||
|
||||
annotation: Any
|
||||
valid_cases: Iterable[Any]
|
||||
invalid_cases: Iterable[Any]
|
||||
|
||||
|
||||
def cases() -> Iterable[Case]:
|
||||
# Gt, Ge, Lt, Le
|
||||
yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1))
|
||||
yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1))
|
||||
yield Case(
|
||||
Annotated[datetime, at.Gt(datetime(2000, 1, 1))],
|
||||
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
|
||||
[datetime(2000, 1, 1), datetime(1999, 12, 31)],
|
||||
)
|
||||
yield Case(
|
||||
Annotated[datetime, at.Gt(date(2000, 1, 1))],
|
||||
[date(2000, 1, 2), date(2000, 1, 3)],
|
||||
[date(2000, 1, 1), date(1999, 12, 31)],
|
||||
)
|
||||
yield Case(
|
||||
Annotated[datetime, at.Gt(Decimal('1.123'))],
|
||||
[Decimal('1.1231'), Decimal('123')],
|
||||
[Decimal('1.123'), Decimal('0')],
|
||||
)
|
||||
|
||||
yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1))
|
||||
yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1))
|
||||
yield Case(
|
||||
Annotated[datetime, at.Ge(datetime(2000, 1, 1))],
|
||||
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
|
||||
[datetime(1998, 1, 1), datetime(1999, 12, 31)],
|
||||
)
|
||||
|
||||
yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4))
|
||||
yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 0.9))
|
||||
yield Case(
|
||||
Annotated[datetime, at.Lt(datetime(2000, 1, 1))],
|
||||
[datetime(1999, 12, 31), datetime(1999, 12, 31)],
|
||||
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
|
||||
)
|
||||
|
||||
yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000))
|
||||
yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9))
|
||||
yield Case(
|
||||
Annotated[datetime, at.Le(datetime(2000, 1, 1))],
|
||||
[datetime(2000, 1, 1), datetime(1999, 12, 31)],
|
||||
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
|
||||
)
|
||||
|
||||
# Interval
|
||||
yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1))
|
||||
yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1))
|
||||
yield Case(Annotated[float, at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1))
|
||||
yield Case(
|
||||
Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))],
|
||||
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
|
||||
[datetime(2000, 1, 1), datetime(2000, 1, 4)],
|
||||
)
|
||||
|
||||
yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4))
|
||||
yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1))
|
||||
|
||||
# lengths
|
||||
|
||||
yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
|
||||
yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
|
||||
yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))
|
||||
yield Case(Annotated[List[int], at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))
|
||||
|
||||
yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10))
|
||||
yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10))
|
||||
yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))
|
||||
yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))
|
||||
|
||||
yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 'x' * 10))
|
||||
yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234'))
|
||||
|
||||
yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}])
|
||||
yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4}))
|
||||
yield Case(Annotated[Tuple[int, ...], at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4)))
|
||||
|
||||
# Timezone
|
||||
|
||||
yield Case(
|
||||
Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)]
|
||||
)
|
||||
yield Case(
|
||||
Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)]
|
||||
)
|
||||
yield Case(
|
||||
Annotated[datetime, at.Timezone(timezone.utc)],
|
||||
[datetime(2000, 1, 1, tzinfo=timezone.utc)],
|
||||
[datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
|
||||
)
|
||||
yield Case(
|
||||
Annotated[datetime, at.Timezone('Europe/London')],
|
||||
[datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))],
|
||||
[datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
|
||||
)
|
||||
|
||||
# Quantity
|
||||
|
||||
yield Case(Annotated[float, at.Unit(unit='m')], (5, 4.2), ('5m', '4.2m'))
|
||||
|
||||
# predicate types
|
||||
|
||||
yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom'])
|
||||
yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC'])
|
||||
yield Case(at.IsDigit[str], ['123'], ['', 'ab', 'a1b2'])
|
||||
yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀'])
|
||||
|
||||
yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5])
|
||||
|
||||
yield Case(at.IsFinite[float], [1.23], [math.nan, math.inf, -math.inf])
|
||||
yield Case(at.IsNotFinite[float], [math.nan, math.inf], [1.23])
|
||||
yield Case(at.IsNan[float], [math.nan], [1.23, math.inf])
|
||||
yield Case(at.IsNotNan[float], [1.23, math.inf], [math.nan])
|
||||
yield Case(at.IsInfinite[float], [math.inf], [math.nan, 1.23])
|
||||
yield Case(at.IsNotInfinite[float], [math.nan, 1.23], [math.inf])
|
||||
|
||||
# check stacked predicates
|
||||
yield Case(at.IsInfinite[Annotated[float, at.Predicate(lambda x: x > 0)]], [math.inf], [-math.inf, 1.23, math.nan])
|
||||
|
||||
# doc
|
||||
yield Case(Annotated[int, at.doc("A number")], [1, 2], [])
|
||||
|
||||
# custom GroupedMetadata
|
||||
class MyCustomGroupedMetadata(at.GroupedMetadata):
|
||||
def __iter__(self) -> Iterator[at.Predicate]:
|
||||
yield at.Predicate(lambda x: float(x).is_integer())
|
||||
|
||||
yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5])
|
||||
@@ -0,0 +1 @@
|
||||
pip
|
||||
@@ -0,0 +1,96 @@
|
||||
Metadata-Version: 2.4
|
||||
Name: anyio
|
||||
Version: 4.12.1
|
||||
Summary: High-level concurrency and networking framework on top of asyncio or Trio
|
||||
Author-email: Alex Grönholm <alex.gronholm@nextday.fi>
|
||||
License-Expression: MIT
|
||||
Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/
|
||||
Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html
|
||||
Project-URL: Source code, https://github.com/agronholm/anyio
|
||||
Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: Framework :: AnyIO
|
||||
Classifier: Typing :: Typed
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: 3.12
|
||||
Classifier: Programming Language :: Python :: 3.13
|
||||
Classifier: Programming Language :: Python :: 3.14
|
||||
Requires-Python: >=3.9
|
||||
Description-Content-Type: text/x-rst
|
||||
License-File: LICENSE
|
||||
Requires-Dist: exceptiongroup>=1.0.2; python_version < "3.11"
|
||||
Requires-Dist: idna>=2.8
|
||||
Requires-Dist: typing_extensions>=4.5; python_version < "3.13"
|
||||
Provides-Extra: trio
|
||||
Requires-Dist: trio>=0.32.0; python_version >= "3.10" and extra == "trio"
|
||||
Requires-Dist: trio>=0.31.0; python_version < "3.10" and extra == "trio"
|
||||
Dynamic: license-file
|
||||
|
||||
.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg
|
||||
:target: https://github.com/agronholm/anyio/actions/workflows/test.yml
|
||||
:alt: Build Status
|
||||
.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master
|
||||
:target: https://coveralls.io/github/agronholm/anyio?branch=master
|
||||
:alt: Code Coverage
|
||||
.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest
|
||||
:target: https://anyio.readthedocs.io/en/latest/?badge=latest
|
||||
:alt: Documentation
|
||||
.. image:: https://badges.gitter.im/gitterHQ/gitter.svg
|
||||
:target: https://gitter.im/python-trio/AnyIO
|
||||
:alt: Gitter chat
|
||||
|
||||
AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or
|
||||
Trio_. It implements Trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony
|
||||
with the native SC of Trio itself.
|
||||
|
||||
Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or
|
||||
Trio_. AnyIO can also be adopted into a library or application incrementally – bit by bit, no full
|
||||
refactoring necessary. It will blend in with the native libraries of your chosen backend.
|
||||
|
||||
To find out why you might want to use AnyIO's APIs instead of asyncio's, you can read about it
|
||||
`here <https://anyio.readthedocs.io/en/stable/why.html>`_.
|
||||
|
||||
Documentation
|
||||
-------------
|
||||
|
||||
View full documentation at: https://anyio.readthedocs.io/
|
||||
|
||||
Features
|
||||
--------
|
||||
|
||||
AnyIO offers the following functionality:
|
||||
|
||||
* Task groups (nurseries_ in trio terminology)
|
||||
* High-level networking (TCP, UDP and UNIX sockets)
|
||||
|
||||
* `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python
|
||||
3.8)
|
||||
* async/await style UDP sockets (unlike asyncio where you still have to use Transports and
|
||||
Protocols)
|
||||
|
||||
* A versatile API for byte streams and object streams
|
||||
* Inter-task synchronization and communication (locks, conditions, events, semaphores, object
|
||||
streams)
|
||||
* Worker threads
|
||||
* Subprocesses
|
||||
* Subinterpreter support for code parallelization (on Python 3.13 and later)
|
||||
* Asynchronous file I/O (using worker threads)
|
||||
* Signal handling
|
||||
* Asynchronous version of the functools_ module
|
||||
|
||||
AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures.
|
||||
It even works with the popular Hypothesis_ library.
|
||||
|
||||
.. _asyncio: https://docs.python.org/3/library/asyncio.html
|
||||
.. _Trio: https://github.com/python-trio/trio
|
||||
.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency
|
||||
.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning
|
||||
.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs
|
||||
.. _pytest: https://docs.pytest.org/en/latest/
|
||||
.. _functools: https://docs.python.org/3/library/functools.html
|
||||
.. _Hypothesis: https://hypothesis.works/
|
||||
@@ -0,0 +1,92 @@
|
||||
anyio-4.12.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
anyio-4.12.1.dist-info/METADATA,sha256=DfiDab9Tmmcfy802lOLTMEHJQShkOSbopCwqCYbLuJk,4277
|
||||
anyio-4.12.1.dist-info/RECORD,,
|
||||
anyio-4.12.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
||||
anyio-4.12.1.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39
|
||||
anyio-4.12.1.dist-info/licenses/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081
|
||||
anyio-4.12.1.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6
|
||||
anyio/__init__.py,sha256=7iDVqMUprUuKNY91FuoKqayAhR-OY136YDPI6P78HHk,6170
|
||||
anyio/__pycache__/__init__.cpython-311.pyc,,
|
||||
anyio/__pycache__/from_thread.cpython-311.pyc,,
|
||||
anyio/__pycache__/functools.cpython-311.pyc,,
|
||||
anyio/__pycache__/lowlevel.cpython-311.pyc,,
|
||||
anyio/__pycache__/pytest_plugin.cpython-311.pyc,,
|
||||
anyio/__pycache__/to_interpreter.cpython-311.pyc,,
|
||||
anyio/__pycache__/to_process.cpython-311.pyc,,
|
||||
anyio/__pycache__/to_thread.cpython-311.pyc,,
|
||||
anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
anyio/_backends/__pycache__/__init__.cpython-311.pyc,,
|
||||
anyio/_backends/__pycache__/_asyncio.cpython-311.pyc,,
|
||||
anyio/_backends/__pycache__/_trio.cpython-311.pyc,,
|
||||
anyio/_backends/_asyncio.py,sha256=xG6qv60mgGnL0mK82dxjH2b8hlkMlJ-x2BqIq3qv70Y,98863
|
||||
anyio/_backends/_trio.py,sha256=30Rctb7lm8g63ZHljVPVnj5aH-uK6oQvphjwUBoAzuI,41456
|
||||
anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
anyio/_core/__pycache__/__init__.cpython-311.pyc,,
|
||||
anyio/_core/__pycache__/_asyncio_selector_thread.cpython-311.pyc,,
|
||||
anyio/_core/__pycache__/_contextmanagers.cpython-311.pyc,,
|
||||
anyio/_core/__pycache__/_eventloop.cpython-311.pyc,,
|
||||
anyio/_core/__pycache__/_exceptions.cpython-311.pyc,,
|
||||
anyio/_core/__pycache__/_fileio.cpython-311.pyc,,
|
||||
anyio/_core/__pycache__/_resources.cpython-311.pyc,,
|
||||
anyio/_core/__pycache__/_signals.cpython-311.pyc,,
|
||||
anyio/_core/__pycache__/_sockets.cpython-311.pyc,,
|
||||
anyio/_core/__pycache__/_streams.cpython-311.pyc,,
|
||||
anyio/_core/__pycache__/_subprocesses.cpython-311.pyc,,
|
||||
anyio/_core/__pycache__/_synchronization.cpython-311.pyc,,
|
||||
anyio/_core/__pycache__/_tasks.cpython-311.pyc,,
|
||||
anyio/_core/__pycache__/_tempfile.cpython-311.pyc,,
|
||||
anyio/_core/__pycache__/_testing.cpython-311.pyc,,
|
||||
anyio/_core/__pycache__/_typedattr.cpython-311.pyc,,
|
||||
anyio/_core/_asyncio_selector_thread.py,sha256=2PdxFM3cs02Kp6BSppbvmRT7q7asreTW5FgBxEsflBo,5626
|
||||
anyio/_core/_contextmanagers.py,sha256=YInBCabiEeS-UaP_Jdxa1CaFC71ETPW8HZTHIM8Rsc8,7215
|
||||
anyio/_core/_eventloop.py,sha256=c2EdcBX-xnKwxPcC4Pjn3_qG9I-x4IWFO2R9RqCGjM4,6448
|
||||
anyio/_core/_exceptions.py,sha256=Y3aq-Wxd7Q2HqwSg7nZPvRsHEuGazv_qeet6gqEBdPk,4407
|
||||
anyio/_core/_fileio.py,sha256=uc7t10Vb-If7GbdWM_zFf-ajUe6uek63fSt7IBLlZW0,25731
|
||||
anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435
|
||||
anyio/_core/_signals.py,sha256=mjTBB2hTKNPRlU0IhnijeQedpWOGERDiMjSlJQsFrug,1016
|
||||
anyio/_core/_sockets.py,sha256=RBXHcUqZt5gg_-OOfgHVv8uq2FSKk1uVUzTdpjBoI1o,34977
|
||||
anyio/_core/_streams.py,sha256=FczFwIgDpnkK0bODWJXMpsUJYdvAD04kaUaGzJU8DK0,1806
|
||||
anyio/_core/_subprocesses.py,sha256=EXm5igL7dj55iYkPlbYVAqtbqxJxjU-6OndSTIx9SRg,8047
|
||||
anyio/_core/_synchronization.py,sha256=MgVVqFzvt580tHC31LiOcq1G6aryut--xRG4Ff8KwxQ,20869
|
||||
anyio/_core/_tasks.py,sha256=pVB7K6AAulzUM8YgXAeqNZG44nSyZ1bYJjH8GznC00I,5435
|
||||
anyio/_core/_tempfile.py,sha256=lHb7CW4FyIlpkf5ADAf4VmLHCKwEHF9nxqNyBCFFUiA,19697
|
||||
anyio/_core/_testing.py,sha256=u7MPqGXwpTxqI7hclSdNA30z2GH1Nw258uwKvy_RfBg,2340
|
||||
anyio/_core/_typedattr.py,sha256=P4ozZikn3-DbpoYcvyghS_FOYAgbmUxeoU8-L_07pZM,2508
|
||||
anyio/abc/__init__.py,sha256=6mWhcl_pGXhrgZVHP_TCfMvIXIOp9mroEFM90fYCU_U,2869
|
||||
anyio/abc/__pycache__/__init__.cpython-311.pyc,,
|
||||
anyio/abc/__pycache__/_eventloop.cpython-311.pyc,,
|
||||
anyio/abc/__pycache__/_resources.cpython-311.pyc,,
|
||||
anyio/abc/__pycache__/_sockets.cpython-311.pyc,,
|
||||
anyio/abc/__pycache__/_streams.cpython-311.pyc,,
|
||||
anyio/abc/__pycache__/_subprocesses.cpython-311.pyc,,
|
||||
anyio/abc/__pycache__/_tasks.cpython-311.pyc,,
|
||||
anyio/abc/__pycache__/_testing.cpython-311.pyc,,
|
||||
anyio/abc/_eventloop.py,sha256=GlzgB3UJGgG6Kr7olpjOZ-o00PghecXuofVDQ_5611Q,10749
|
||||
anyio/abc/_resources.py,sha256=DrYvkNN1hH6Uvv5_5uKySvDsnknGVDe8FCKfko0VtN8,783
|
||||
anyio/abc/_sockets.py,sha256=ECTY0jLEF18gryANHR3vFzXzGdZ-xPwELq1QdgOb0Jo,13258
|
||||
anyio/abc/_streams.py,sha256=005GKSCXGprxnhucILboSqc2JFovECZk9m3p-qqxXVc,7640
|
||||
anyio/abc/_subprocesses.py,sha256=cumAPJTktOQtw63IqG0lDpyZqu_l1EElvQHMiwJgL08,2067
|
||||
anyio/abc/_tasks.py,sha256=KC7wrciE48AINOI-AhPutnFhe1ewfP7QnamFlDzqesQ,3721
|
||||
anyio/abc/_testing.py,sha256=tBJUzkSfOXJw23fe8qSJ03kJlShOYjjaEyFB6k6MYT8,1821
|
||||
anyio/from_thread.py,sha256=L-0w1HxJ6BSb-KuVi57k5Tkc3yzQrx3QK5tAxMPcY-0,19141
|
||||
anyio/functools.py,sha256=HWj7GBEmc0Z-mZg3uok7Z7ZJn0rEC_0Pzbt0nYUDaTQ,10973
|
||||
anyio/lowlevel.py,sha256=AyKLVK3LaWSoK39LkCKxE4_GDMLKZBNqTrLUgk63y80,5158
|
||||
anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
anyio/pytest_plugin.py,sha256=3jAFQn0jv_pyoWE2GBBlHaj9sqXj4e8vob0_hgrsXE8,10244
|
||||
anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
anyio/streams/__pycache__/__init__.cpython-311.pyc,,
|
||||
anyio/streams/__pycache__/buffered.cpython-311.pyc,,
|
||||
anyio/streams/__pycache__/file.cpython-311.pyc,,
|
||||
anyio/streams/__pycache__/memory.cpython-311.pyc,,
|
||||
anyio/streams/__pycache__/stapled.cpython-311.pyc,,
|
||||
anyio/streams/__pycache__/text.cpython-311.pyc,,
|
||||
anyio/streams/__pycache__/tls.cpython-311.pyc,,
|
||||
anyio/streams/buffered.py,sha256=2R3PeJhe4EXrdYqz44Y6-Eg9R6DrmlsYrP36Ir43-po,6263
|
||||
anyio/streams/file.py,sha256=4WZ7XGz5WNu39FQHvqbe__TQ0HDP9OOhgO1mk9iVpVU,4470
|
||||
anyio/streams/memory.py,sha256=F0zwzvFJKAhX_LRZGoKzzqDC2oMM-f-yyTBrEYEGOaU,10740
|
||||
anyio/streams/stapled.py,sha256=T8Xqwf8K6EgURPxbt1N4i7A8BAk-gScv-GRhjLXIf_o,4390
|
||||
anyio/streams/text.py,sha256=BcVAGJw1VRvtIqnv-o0Rb0pwH7p8vwlvl21xHq522ag,5765
|
||||
anyio/streams/tls.py,sha256=Jpxy0Mfbcp1BxHCwE-YjSSFaLnIBbnnwur-excYThs4,15368
|
||||
anyio/to_interpreter.py,sha256=_mLngrMy97TMR6VbW4Y6YzDUk9ZuPcQMPlkuyRh3C9k,7100
|
||||
anyio/to_process.py,sha256=J7gAA_YOuoHqnpDAf5fm1Qu6kOmTzdFbiDNvnV755vk,9798
|
||||
anyio/to_thread.py,sha256=menEgXYmUV7Fjg_9WqCV95P9MAtQS8BzPGGcWB_QnfQ,2687
|
||||
@@ -0,0 +1,5 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: setuptools (80.9.0)
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
|
||||
@@ -0,0 +1,2 @@
|
||||
[pytest11]
|
||||
anyio = anyio.pytest_plugin
|
||||
@@ -0,0 +1,20 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2018 Alex Grönholm
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
@@ -0,0 +1 @@
|
||||
anyio
|
||||
111
venv/lib/python3.11/site-packages/anyio/__init__.py
Normal file
111
venv/lib/python3.11/site-packages/anyio/__init__.py
Normal file
@@ -0,0 +1,111 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from ._core._contextmanagers import AsyncContextManagerMixin as AsyncContextManagerMixin
|
||||
from ._core._contextmanagers import ContextManagerMixin as ContextManagerMixin
|
||||
from ._core._eventloop import current_time as current_time
|
||||
from ._core._eventloop import get_all_backends as get_all_backends
|
||||
from ._core._eventloop import get_available_backends as get_available_backends
|
||||
from ._core._eventloop import get_cancelled_exc_class as get_cancelled_exc_class
|
||||
from ._core._eventloop import run as run
|
||||
from ._core._eventloop import sleep as sleep
|
||||
from ._core._eventloop import sleep_forever as sleep_forever
|
||||
from ._core._eventloop import sleep_until as sleep_until
|
||||
from ._core._exceptions import BrokenResourceError as BrokenResourceError
|
||||
from ._core._exceptions import BrokenWorkerInterpreter as BrokenWorkerInterpreter
|
||||
from ._core._exceptions import BrokenWorkerProcess as BrokenWorkerProcess
|
||||
from ._core._exceptions import BusyResourceError as BusyResourceError
|
||||
from ._core._exceptions import ClosedResourceError as ClosedResourceError
|
||||
from ._core._exceptions import ConnectionFailed as ConnectionFailed
|
||||
from ._core._exceptions import DelimiterNotFound as DelimiterNotFound
|
||||
from ._core._exceptions import EndOfStream as EndOfStream
|
||||
from ._core._exceptions import IncompleteRead as IncompleteRead
|
||||
from ._core._exceptions import NoEventLoopError as NoEventLoopError
|
||||
from ._core._exceptions import RunFinishedError as RunFinishedError
|
||||
from ._core._exceptions import TypedAttributeLookupError as TypedAttributeLookupError
|
||||
from ._core._exceptions import WouldBlock as WouldBlock
|
||||
from ._core._fileio import AsyncFile as AsyncFile
|
||||
from ._core._fileio import Path as Path
|
||||
from ._core._fileio import open_file as open_file
|
||||
from ._core._fileio import wrap_file as wrap_file
|
||||
from ._core._resources import aclose_forcefully as aclose_forcefully
|
||||
from ._core._signals import open_signal_receiver as open_signal_receiver
|
||||
from ._core._sockets import TCPConnectable as TCPConnectable
|
||||
from ._core._sockets import UNIXConnectable as UNIXConnectable
|
||||
from ._core._sockets import as_connectable as as_connectable
|
||||
from ._core._sockets import connect_tcp as connect_tcp
|
||||
from ._core._sockets import connect_unix as connect_unix
|
||||
from ._core._sockets import create_connected_udp_socket as create_connected_udp_socket
|
||||
from ._core._sockets import (
|
||||
create_connected_unix_datagram_socket as create_connected_unix_datagram_socket,
|
||||
)
|
||||
from ._core._sockets import create_tcp_listener as create_tcp_listener
|
||||
from ._core._sockets import create_udp_socket as create_udp_socket
|
||||
from ._core._sockets import create_unix_datagram_socket as create_unix_datagram_socket
|
||||
from ._core._sockets import create_unix_listener as create_unix_listener
|
||||
from ._core._sockets import getaddrinfo as getaddrinfo
|
||||
from ._core._sockets import getnameinfo as getnameinfo
|
||||
from ._core._sockets import notify_closing as notify_closing
|
||||
from ._core._sockets import wait_readable as wait_readable
|
||||
from ._core._sockets import wait_socket_readable as wait_socket_readable
|
||||
from ._core._sockets import wait_socket_writable as wait_socket_writable
|
||||
from ._core._sockets import wait_writable as wait_writable
|
||||
from ._core._streams import create_memory_object_stream as create_memory_object_stream
|
||||
from ._core._subprocesses import open_process as open_process
|
||||
from ._core._subprocesses import run_process as run_process
|
||||
from ._core._synchronization import CapacityLimiter as CapacityLimiter
|
||||
from ._core._synchronization import (
|
||||
CapacityLimiterStatistics as CapacityLimiterStatistics,
|
||||
)
|
||||
from ._core._synchronization import Condition as Condition
|
||||
from ._core._synchronization import ConditionStatistics as ConditionStatistics
|
||||
from ._core._synchronization import Event as Event
|
||||
from ._core._synchronization import EventStatistics as EventStatistics
|
||||
from ._core._synchronization import Lock as Lock
|
||||
from ._core._synchronization import LockStatistics as LockStatistics
|
||||
from ._core._synchronization import ResourceGuard as ResourceGuard
|
||||
from ._core._synchronization import Semaphore as Semaphore
|
||||
from ._core._synchronization import SemaphoreStatistics as SemaphoreStatistics
|
||||
from ._core._tasks import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED
|
||||
from ._core._tasks import CancelScope as CancelScope
|
||||
from ._core._tasks import create_task_group as create_task_group
|
||||
from ._core._tasks import current_effective_deadline as current_effective_deadline
|
||||
from ._core._tasks import fail_after as fail_after
|
||||
from ._core._tasks import move_on_after as move_on_after
|
||||
from ._core._tempfile import NamedTemporaryFile as NamedTemporaryFile
|
||||
from ._core._tempfile import SpooledTemporaryFile as SpooledTemporaryFile
|
||||
from ._core._tempfile import TemporaryDirectory as TemporaryDirectory
|
||||
from ._core._tempfile import TemporaryFile as TemporaryFile
|
||||
from ._core._tempfile import gettempdir as gettempdir
|
||||
from ._core._tempfile import gettempdirb as gettempdirb
|
||||
from ._core._tempfile import mkdtemp as mkdtemp
|
||||
from ._core._tempfile import mkstemp as mkstemp
|
||||
from ._core._testing import TaskInfo as TaskInfo
|
||||
from ._core._testing import get_current_task as get_current_task
|
||||
from ._core._testing import get_running_tasks as get_running_tasks
|
||||
from ._core._testing import wait_all_tasks_blocked as wait_all_tasks_blocked
|
||||
from ._core._typedattr import TypedAttributeProvider as TypedAttributeProvider
|
||||
from ._core._typedattr import TypedAttributeSet as TypedAttributeSet
|
||||
from ._core._typedattr import typed_attribute as typed_attribute
|
||||
|
||||
# Re-export imports so they look like they live directly in this package
|
||||
for __value in list(locals().values()):
|
||||
if getattr(__value, "__module__", "").startswith("anyio."):
|
||||
__value.__module__ = __name__
|
||||
|
||||
|
||||
del __value
|
||||
|
||||
|
||||
def __getattr__(attr: str) -> type[BrokenWorkerInterpreter]:
|
||||
"""Support deprecated aliases."""
|
||||
if attr == "BrokenWorkerIntepreter":
|
||||
import warnings
|
||||
|
||||
warnings.warn(
|
||||
"The 'BrokenWorkerIntepreter' alias is deprecated, use 'BrokenWorkerInterpreter' instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return BrokenWorkerInterpreter
|
||||
|
||||
raise AttributeError(f"module {__name__!r} has no attribute {attr!r}")
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
2980
venv/lib/python3.11/site-packages/anyio/_backends/_asyncio.py
Normal file
2980
venv/lib/python3.11/site-packages/anyio/_backends/_asyncio.py
Normal file
File diff suppressed because it is too large
Load Diff
1346
venv/lib/python3.11/site-packages/anyio/_backends/_trio.py
Normal file
1346
venv/lib/python3.11/site-packages/anyio/_backends/_trio.py
Normal file
File diff suppressed because it is too large
Load Diff
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,167 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import socket
|
||||
import threading
|
||||
from collections.abc import Callable
|
||||
from selectors import EVENT_READ, EVENT_WRITE, DefaultSelector
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from _typeshed import FileDescriptorLike
|
||||
|
||||
_selector_lock = threading.Lock()
|
||||
_selector: Selector | None = None
|
||||
|
||||
|
||||
class Selector:
|
||||
def __init__(self) -> None:
|
||||
self._thread = threading.Thread(target=self.run, name="AnyIO socket selector")
|
||||
self._selector = DefaultSelector()
|
||||
self._send, self._receive = socket.socketpair()
|
||||
self._send.setblocking(False)
|
||||
self._receive.setblocking(False)
|
||||
# This somewhat reduces the amount of memory wasted queueing up data
|
||||
# for wakeups. With these settings, maximum number of 1-byte sends
|
||||
# before getting BlockingIOError:
|
||||
# Linux 4.8: 6
|
||||
# macOS (darwin 15.5): 1
|
||||
# Windows 10: 525347
|
||||
# Windows you're weird. (And on Windows setting SNDBUF to 0 makes send
|
||||
# blocking, even on non-blocking sockets, so don't do that.)
|
||||
self._receive.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 1)
|
||||
self._send.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1)
|
||||
# On Windows this is a TCP socket so this might matter. On other
|
||||
# platforms this fails b/c AF_UNIX sockets aren't actually TCP.
|
||||
try:
|
||||
self._send.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
self._selector.register(self._receive, EVENT_READ)
|
||||
self._closed = False
|
||||
|
||||
def start(self) -> None:
|
||||
self._thread.start()
|
||||
threading._register_atexit(self._stop) # type: ignore[attr-defined]
|
||||
|
||||
def _stop(self) -> None:
|
||||
global _selector
|
||||
self._closed = True
|
||||
self._notify_self()
|
||||
self._send.close()
|
||||
self._thread.join()
|
||||
self._selector.unregister(self._receive)
|
||||
self._receive.close()
|
||||
self._selector.close()
|
||||
_selector = None
|
||||
assert not self._selector.get_map(), (
|
||||
"selector still has registered file descriptors after shutdown"
|
||||
)
|
||||
|
||||
def _notify_self(self) -> None:
|
||||
try:
|
||||
self._send.send(b"\x00")
|
||||
except BlockingIOError:
|
||||
pass
|
||||
|
||||
def add_reader(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None:
|
||||
loop = asyncio.get_running_loop()
|
||||
try:
|
||||
key = self._selector.get_key(fd)
|
||||
except KeyError:
|
||||
self._selector.register(fd, EVENT_READ, {EVENT_READ: (loop, callback)})
|
||||
else:
|
||||
if EVENT_READ in key.data:
|
||||
raise ValueError(
|
||||
"this file descriptor is already registered for reading"
|
||||
)
|
||||
|
||||
key.data[EVENT_READ] = loop, callback
|
||||
self._selector.modify(fd, key.events | EVENT_READ, key.data)
|
||||
|
||||
self._notify_self()
|
||||
|
||||
def add_writer(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None:
|
||||
loop = asyncio.get_running_loop()
|
||||
try:
|
||||
key = self._selector.get_key(fd)
|
||||
except KeyError:
|
||||
self._selector.register(fd, EVENT_WRITE, {EVENT_WRITE: (loop, callback)})
|
||||
else:
|
||||
if EVENT_WRITE in key.data:
|
||||
raise ValueError(
|
||||
"this file descriptor is already registered for writing"
|
||||
)
|
||||
|
||||
key.data[EVENT_WRITE] = loop, callback
|
||||
self._selector.modify(fd, key.events | EVENT_WRITE, key.data)
|
||||
|
||||
self._notify_self()
|
||||
|
||||
def remove_reader(self, fd: FileDescriptorLike) -> bool:
|
||||
try:
|
||||
key = self._selector.get_key(fd)
|
||||
except KeyError:
|
||||
return False
|
||||
|
||||
if new_events := key.events ^ EVENT_READ:
|
||||
del key.data[EVENT_READ]
|
||||
self._selector.modify(fd, new_events, key.data)
|
||||
else:
|
||||
self._selector.unregister(fd)
|
||||
|
||||
return True
|
||||
|
||||
def remove_writer(self, fd: FileDescriptorLike) -> bool:
|
||||
try:
|
||||
key = self._selector.get_key(fd)
|
||||
except KeyError:
|
||||
return False
|
||||
|
||||
if new_events := key.events ^ EVENT_WRITE:
|
||||
del key.data[EVENT_WRITE]
|
||||
self._selector.modify(fd, new_events, key.data)
|
||||
else:
|
||||
self._selector.unregister(fd)
|
||||
|
||||
return True
|
||||
|
||||
def run(self) -> None:
|
||||
while not self._closed:
|
||||
for key, events in self._selector.select():
|
||||
if key.fileobj is self._receive:
|
||||
try:
|
||||
while self._receive.recv(4096):
|
||||
pass
|
||||
except BlockingIOError:
|
||||
pass
|
||||
|
||||
continue
|
||||
|
||||
if events & EVENT_READ:
|
||||
loop, callback = key.data[EVENT_READ]
|
||||
self.remove_reader(key.fd)
|
||||
try:
|
||||
loop.call_soon_threadsafe(callback)
|
||||
except RuntimeError:
|
||||
pass # the loop was already closed
|
||||
|
||||
if events & EVENT_WRITE:
|
||||
loop, callback = key.data[EVENT_WRITE]
|
||||
self.remove_writer(key.fd)
|
||||
try:
|
||||
loop.call_soon_threadsafe(callback)
|
||||
except RuntimeError:
|
||||
pass # the loop was already closed
|
||||
|
||||
|
||||
def get_selector() -> Selector:
|
||||
global _selector
|
||||
|
||||
with _selector_lock:
|
||||
if _selector is None:
|
||||
_selector = Selector()
|
||||
_selector.start()
|
||||
|
||||
return _selector
|
||||
@@ -0,0 +1,200 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import abstractmethod
|
||||
from contextlib import AbstractAsyncContextManager, AbstractContextManager
|
||||
from inspect import isasyncgen, iscoroutine, isgenerator
|
||||
from types import TracebackType
|
||||
from typing import Protocol, TypeVar, cast, final
|
||||
|
||||
_T_co = TypeVar("_T_co", covariant=True)
|
||||
_ExitT_co = TypeVar("_ExitT_co", covariant=True, bound="bool | None")
|
||||
|
||||
|
||||
class _SupportsCtxMgr(Protocol[_T_co, _ExitT_co]):
|
||||
def __contextmanager__(self) -> AbstractContextManager[_T_co, _ExitT_co]: ...
|
||||
|
||||
|
||||
class _SupportsAsyncCtxMgr(Protocol[_T_co, _ExitT_co]):
|
||||
def __asynccontextmanager__(
|
||||
self,
|
||||
) -> AbstractAsyncContextManager[_T_co, _ExitT_co]: ...
|
||||
|
||||
|
||||
class ContextManagerMixin:
|
||||
"""
|
||||
Mixin class providing context manager functionality via a generator-based
|
||||
implementation.
|
||||
|
||||
This class allows you to implement a context manager via :meth:`__contextmanager__`
|
||||
which should return a generator. The mechanics are meant to mirror those of
|
||||
:func:`@contextmanager <contextlib.contextmanager>`.
|
||||
|
||||
.. note:: Classes using this mix-in are not reentrant as context managers, meaning
|
||||
that once you enter it, you can't re-enter before first exiting it.
|
||||
|
||||
.. seealso:: :doc:`contextmanagers`
|
||||
"""
|
||||
|
||||
__cm: AbstractContextManager[object, bool | None] | None = None
|
||||
|
||||
@final
|
||||
def __enter__(self: _SupportsCtxMgr[_T_co, bool | None]) -> _T_co:
|
||||
# Needed for mypy to assume self still has the __cm member
|
||||
assert isinstance(self, ContextManagerMixin)
|
||||
if self.__cm is not None:
|
||||
raise RuntimeError(
|
||||
f"this {self.__class__.__qualname__} has already been entered"
|
||||
)
|
||||
|
||||
cm = self.__contextmanager__()
|
||||
if not isinstance(cm, AbstractContextManager):
|
||||
if isgenerator(cm):
|
||||
raise TypeError(
|
||||
"__contextmanager__() returned a generator object instead of "
|
||||
"a context manager. Did you forget to add the @contextmanager "
|
||||
"decorator?"
|
||||
)
|
||||
|
||||
raise TypeError(
|
||||
f"__contextmanager__() did not return a context manager object, "
|
||||
f"but {cm.__class__!r}"
|
||||
)
|
||||
|
||||
if cm is self:
|
||||
raise TypeError(
|
||||
f"{self.__class__.__qualname__}.__contextmanager__() returned "
|
||||
f"self. Did you forget to add the @contextmanager decorator and a "
|
||||
f"'yield' statement?"
|
||||
)
|
||||
|
||||
value = cm.__enter__()
|
||||
self.__cm = cm
|
||||
return value
|
||||
|
||||
@final
|
||||
def __exit__(
|
||||
self: _SupportsCtxMgr[object, _ExitT_co],
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_val: BaseException | None,
|
||||
exc_tb: TracebackType | None,
|
||||
) -> _ExitT_co:
|
||||
# Needed for mypy to assume self still has the __cm member
|
||||
assert isinstance(self, ContextManagerMixin)
|
||||
if self.__cm is None:
|
||||
raise RuntimeError(
|
||||
f"this {self.__class__.__qualname__} has not been entered yet"
|
||||
)
|
||||
|
||||
# Prevent circular references
|
||||
cm = self.__cm
|
||||
del self.__cm
|
||||
|
||||
return cast(_ExitT_co, cm.__exit__(exc_type, exc_val, exc_tb))
|
||||
|
||||
@abstractmethod
|
||||
def __contextmanager__(self) -> AbstractContextManager[object, bool | None]:
|
||||
"""
|
||||
Implement your context manager logic here.
|
||||
|
||||
This method **must** be decorated with
|
||||
:func:`@contextmanager <contextlib.contextmanager>`.
|
||||
|
||||
.. note:: Remember that the ``yield`` will raise any exception raised in the
|
||||
enclosed context block, so use a ``finally:`` block to clean up resources!
|
||||
|
||||
:return: a context manager object
|
||||
"""
|
||||
|
||||
|
||||
class AsyncContextManagerMixin:
|
||||
"""
|
||||
Mixin class providing async context manager functionality via a generator-based
|
||||
implementation.
|
||||
|
||||
This class allows you to implement a context manager via
|
||||
:meth:`__asynccontextmanager__`. The mechanics are meant to mirror those of
|
||||
:func:`@asynccontextmanager <contextlib.asynccontextmanager>`.
|
||||
|
||||
.. note:: Classes using this mix-in are not reentrant as context managers, meaning
|
||||
that once you enter it, you can't re-enter before first exiting it.
|
||||
|
||||
.. seealso:: :doc:`contextmanagers`
|
||||
"""
|
||||
|
||||
__cm: AbstractAsyncContextManager[object, bool | None] | None = None
|
||||
|
||||
@final
|
||||
async def __aenter__(self: _SupportsAsyncCtxMgr[_T_co, bool | None]) -> _T_co:
|
||||
# Needed for mypy to assume self still has the __cm member
|
||||
assert isinstance(self, AsyncContextManagerMixin)
|
||||
if self.__cm is not None:
|
||||
raise RuntimeError(
|
||||
f"this {self.__class__.__qualname__} has already been entered"
|
||||
)
|
||||
|
||||
cm = self.__asynccontextmanager__()
|
||||
if not isinstance(cm, AbstractAsyncContextManager):
|
||||
if isasyncgen(cm):
|
||||
raise TypeError(
|
||||
"__asynccontextmanager__() returned an async generator instead of "
|
||||
"an async context manager. Did you forget to add the "
|
||||
"@asynccontextmanager decorator?"
|
||||
)
|
||||
elif iscoroutine(cm):
|
||||
cm.close()
|
||||
raise TypeError(
|
||||
"__asynccontextmanager__() returned a coroutine object instead of "
|
||||
"an async context manager. Did you forget to add the "
|
||||
"@asynccontextmanager decorator and a 'yield' statement?"
|
||||
)
|
||||
|
||||
raise TypeError(
|
||||
f"__asynccontextmanager__() did not return an async context manager, "
|
||||
f"but {cm.__class__!r}"
|
||||
)
|
||||
|
||||
if cm is self:
|
||||
raise TypeError(
|
||||
f"{self.__class__.__qualname__}.__asynccontextmanager__() returned "
|
||||
f"self. Did you forget to add the @asynccontextmanager decorator and a "
|
||||
f"'yield' statement?"
|
||||
)
|
||||
|
||||
value = await cm.__aenter__()
|
||||
self.__cm = cm
|
||||
return value
|
||||
|
||||
@final
|
||||
async def __aexit__(
|
||||
self: _SupportsAsyncCtxMgr[object, _ExitT_co],
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_val: BaseException | None,
|
||||
exc_tb: TracebackType | None,
|
||||
) -> _ExitT_co:
|
||||
assert isinstance(self, AsyncContextManagerMixin)
|
||||
if self.__cm is None:
|
||||
raise RuntimeError(
|
||||
f"this {self.__class__.__qualname__} has not been entered yet"
|
||||
)
|
||||
|
||||
# Prevent circular references
|
||||
cm = self.__cm
|
||||
del self.__cm
|
||||
|
||||
return cast(_ExitT_co, await cm.__aexit__(exc_type, exc_val, exc_tb))
|
||||
|
||||
@abstractmethod
|
||||
def __asynccontextmanager__(
|
||||
self,
|
||||
) -> AbstractAsyncContextManager[object, bool | None]:
|
||||
"""
|
||||
Implement your async context manager logic here.
|
||||
|
||||
This method **must** be decorated with
|
||||
:func:`@asynccontextmanager <contextlib.asynccontextmanager>`.
|
||||
|
||||
.. note:: Remember that the ``yield`` will raise any exception raised in the
|
||||
enclosed context block, so use a ``finally:`` block to clean up resources!
|
||||
|
||||
:return: an async context manager object
|
||||
"""
|
||||
234
venv/lib/python3.11/site-packages/anyio/_core/_eventloop.py
Normal file
234
venv/lib/python3.11/site-packages/anyio/_core/_eventloop.py
Normal file
@@ -0,0 +1,234 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import math
|
||||
import sys
|
||||
import threading
|
||||
from collections.abc import Awaitable, Callable, Generator
|
||||
from contextlib import contextmanager
|
||||
from contextvars import Token
|
||||
from importlib import import_module
|
||||
from typing import TYPE_CHECKING, Any, TypeVar
|
||||
|
||||
from ._exceptions import NoEventLoopError
|
||||
|
||||
if sys.version_info >= (3, 11):
|
||||
from typing import TypeVarTuple, Unpack
|
||||
else:
|
||||
from typing_extensions import TypeVarTuple, Unpack
|
||||
|
||||
sniffio: Any
|
||||
try:
|
||||
import sniffio
|
||||
except ModuleNotFoundError:
|
||||
sniffio = None
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..abc import AsyncBackend
|
||||
|
||||
# This must be updated when new backends are introduced
|
||||
BACKENDS = "asyncio", "trio"
|
||||
|
||||
T_Retval = TypeVar("T_Retval")
|
||||
PosArgsT = TypeVarTuple("PosArgsT")
|
||||
|
||||
threadlocals = threading.local()
|
||||
loaded_backends: dict[str, type[AsyncBackend]] = {}
|
||||
|
||||
|
||||
def run(
|
||||
func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]],
|
||||
*args: Unpack[PosArgsT],
|
||||
backend: str = "asyncio",
|
||||
backend_options: dict[str, Any] | None = None,
|
||||
) -> T_Retval:
|
||||
"""
|
||||
Run the given coroutine function in an asynchronous event loop.
|
||||
|
||||
The current thread must not be already running an event loop.
|
||||
|
||||
:param func: a coroutine function
|
||||
:param args: positional arguments to ``func``
|
||||
:param backend: name of the asynchronous event loop implementation – currently
|
||||
either ``asyncio`` or ``trio``
|
||||
:param backend_options: keyword arguments to call the backend ``run()``
|
||||
implementation with (documented :ref:`here <backend options>`)
|
||||
:return: the return value of the coroutine function
|
||||
:raises RuntimeError: if an asynchronous event loop is already running in this
|
||||
thread
|
||||
:raises LookupError: if the named backend is not found
|
||||
|
||||
"""
|
||||
if asynclib_name := current_async_library():
|
||||
raise RuntimeError(f"Already running {asynclib_name} in this thread")
|
||||
|
||||
try:
|
||||
async_backend = get_async_backend(backend)
|
||||
except ImportError as exc:
|
||||
raise LookupError(f"No such backend: {backend}") from exc
|
||||
|
||||
token = None
|
||||
if asynclib_name is None:
|
||||
# Since we're in control of the event loop, we can cache the name of the async
|
||||
# library
|
||||
token = set_current_async_library(backend)
|
||||
|
||||
try:
|
||||
backend_options = backend_options or {}
|
||||
return async_backend.run(func, args, {}, backend_options)
|
||||
finally:
|
||||
reset_current_async_library(token)
|
||||
|
||||
|
||||
async def sleep(delay: float) -> None:
|
||||
"""
|
||||
Pause the current task for the specified duration.
|
||||
|
||||
:param delay: the duration, in seconds
|
||||
|
||||
"""
|
||||
return await get_async_backend().sleep(delay)
|
||||
|
||||
|
||||
async def sleep_forever() -> None:
|
||||
"""
|
||||
Pause the current task until it's cancelled.
|
||||
|
||||
This is a shortcut for ``sleep(math.inf)``.
|
||||
|
||||
.. versionadded:: 3.1
|
||||
|
||||
"""
|
||||
await sleep(math.inf)
|
||||
|
||||
|
||||
async def sleep_until(deadline: float) -> None:
|
||||
"""
|
||||
Pause the current task until the given time.
|
||||
|
||||
:param deadline: the absolute time to wake up at (according to the internal
|
||||
monotonic clock of the event loop)
|
||||
|
||||
.. versionadded:: 3.1
|
||||
|
||||
"""
|
||||
now = current_time()
|
||||
await sleep(max(deadline - now, 0))
|
||||
|
||||
|
||||
def current_time() -> float:
|
||||
"""
|
||||
Return the current value of the event loop's internal clock.
|
||||
|
||||
:return: the clock value (seconds)
|
||||
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
|
||||
current thread
|
||||
|
||||
"""
|
||||
return get_async_backend().current_time()
|
||||
|
||||
|
||||
def get_all_backends() -> tuple[str, ...]:
|
||||
"""Return a tuple of the names of all built-in backends."""
|
||||
return BACKENDS
|
||||
|
||||
|
||||
def get_available_backends() -> tuple[str, ...]:
|
||||
"""
|
||||
Test for the availability of built-in backends.
|
||||
|
||||
:return a tuple of the built-in backend names that were successfully imported
|
||||
|
||||
.. versionadded:: 4.12
|
||||
|
||||
"""
|
||||
available_backends: list[str] = []
|
||||
for backend_name in get_all_backends():
|
||||
try:
|
||||
get_async_backend(backend_name)
|
||||
except ImportError:
|
||||
continue
|
||||
|
||||
available_backends.append(backend_name)
|
||||
|
||||
return tuple(available_backends)
|
||||
|
||||
|
||||
def get_cancelled_exc_class() -> type[BaseException]:
|
||||
"""
|
||||
Return the current async library's cancellation exception class.
|
||||
|
||||
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
|
||||
current thread
|
||||
|
||||
"""
|
||||
return get_async_backend().cancelled_exception_class()
|
||||
|
||||
|
||||
#
|
||||
# Private API
|
||||
#
|
||||
|
||||
|
||||
@contextmanager
|
||||
def claim_worker_thread(
|
||||
backend_class: type[AsyncBackend], token: object
|
||||
) -> Generator[Any, None, None]:
|
||||
from ..lowlevel import EventLoopToken
|
||||
|
||||
threadlocals.current_token = EventLoopToken(backend_class, token)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
del threadlocals.current_token
|
||||
|
||||
|
||||
def get_async_backend(asynclib_name: str | None = None) -> type[AsyncBackend]:
|
||||
if asynclib_name is None:
|
||||
asynclib_name = current_async_library()
|
||||
if not asynclib_name:
|
||||
raise NoEventLoopError(
|
||||
f"Not currently running on any asynchronous event loop. "
|
||||
f"Available async backends: {', '.join(get_all_backends())}"
|
||||
)
|
||||
|
||||
# We use our own dict instead of sys.modules to get the already imported back-end
|
||||
# class because the appropriate modules in sys.modules could potentially be only
|
||||
# partially initialized
|
||||
try:
|
||||
return loaded_backends[asynclib_name]
|
||||
except KeyError:
|
||||
module = import_module(f"anyio._backends._{asynclib_name}")
|
||||
loaded_backends[asynclib_name] = module.backend_class
|
||||
return module.backend_class
|
||||
|
||||
|
||||
def current_async_library() -> str | None:
|
||||
if sniffio is None:
|
||||
# If sniffio is not installed, we assume we're either running asyncio or nothing
|
||||
import asyncio
|
||||
|
||||
try:
|
||||
asyncio.get_running_loop()
|
||||
return "asyncio"
|
||||
except RuntimeError:
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
return sniffio.current_async_library()
|
||||
except sniffio.AsyncLibraryNotFoundError:
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def set_current_async_library(asynclib_name: str | None) -> Token | None:
|
||||
# no-op if sniffio is not installed
|
||||
if sniffio is None:
|
||||
return None
|
||||
|
||||
return sniffio.current_async_library_cvar.set(asynclib_name)
|
||||
|
||||
|
||||
def reset_current_async_library(token: Token | None) -> None:
|
||||
if token is not None:
|
||||
sniffio.current_async_library_cvar.reset(token)
|
||||
156
venv/lib/python3.11/site-packages/anyio/_core/_exceptions.py
Normal file
156
venv/lib/python3.11/site-packages/anyio/_core/_exceptions.py
Normal file
@@ -0,0 +1,156 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
from collections.abc import Generator
|
||||
from textwrap import dedent
|
||||
from typing import Any
|
||||
|
||||
if sys.version_info < (3, 11):
|
||||
from exceptiongroup import BaseExceptionGroup
|
||||
|
||||
|
||||
class BrokenResourceError(Exception):
|
||||
"""
|
||||
Raised when trying to use a resource that has been rendered unusable due to external
|
||||
causes (e.g. a send stream whose peer has disconnected).
|
||||
"""
|
||||
|
||||
|
||||
class BrokenWorkerProcess(Exception):
|
||||
"""
|
||||
Raised by :meth:`~anyio.to_process.run_sync` if the worker process terminates abruptly or
|
||||
otherwise misbehaves.
|
||||
"""
|
||||
|
||||
|
||||
class BrokenWorkerInterpreter(Exception):
|
||||
"""
|
||||
Raised by :meth:`~anyio.to_interpreter.run_sync` if an unexpected exception is
|
||||
raised in the subinterpreter.
|
||||
"""
|
||||
|
||||
def __init__(self, excinfo: Any):
|
||||
# This was adapted from concurrent.futures.interpreter.ExecutionFailed
|
||||
msg = excinfo.formatted
|
||||
if not msg:
|
||||
if excinfo.type and excinfo.msg:
|
||||
msg = f"{excinfo.type.__name__}: {excinfo.msg}"
|
||||
else:
|
||||
msg = excinfo.type.__name__ or excinfo.msg
|
||||
|
||||
super().__init__(msg)
|
||||
self.excinfo = excinfo
|
||||
|
||||
def __str__(self) -> str:
|
||||
try:
|
||||
formatted = self.excinfo.errdisplay
|
||||
except Exception:
|
||||
return super().__str__()
|
||||
else:
|
||||
return dedent(
|
||||
f"""
|
||||
{super().__str__()}
|
||||
|
||||
Uncaught in the interpreter:
|
||||
|
||||
{formatted}
|
||||
""".strip()
|
||||
)
|
||||
|
||||
|
||||
class BusyResourceError(Exception):
|
||||
"""
|
||||
Raised when two tasks are trying to read from or write to the same resource
|
||||
concurrently.
|
||||
"""
|
||||
|
||||
def __init__(self, action: str):
|
||||
super().__init__(f"Another task is already {action} this resource")
|
||||
|
||||
|
||||
class ClosedResourceError(Exception):
|
||||
"""Raised when trying to use a resource that has been closed."""
|
||||
|
||||
|
||||
class ConnectionFailed(OSError):
|
||||
"""
|
||||
Raised when a connection attempt fails.
|
||||
|
||||
.. note:: This class inherits from :exc:`OSError` for backwards compatibility.
|
||||
"""
|
||||
|
||||
|
||||
def iterate_exceptions(
|
||||
exception: BaseException,
|
||||
) -> Generator[BaseException, None, None]:
|
||||
if isinstance(exception, BaseExceptionGroup):
|
||||
for exc in exception.exceptions:
|
||||
yield from iterate_exceptions(exc)
|
||||
else:
|
||||
yield exception
|
||||
|
||||
|
||||
class DelimiterNotFound(Exception):
|
||||
"""
|
||||
Raised during
|
||||
:meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
|
||||
maximum number of bytes has been read without the delimiter being found.
|
||||
"""
|
||||
|
||||
def __init__(self, max_bytes: int) -> None:
|
||||
super().__init__(
|
||||
f"The delimiter was not found among the first {max_bytes} bytes"
|
||||
)
|
||||
|
||||
|
||||
class EndOfStream(Exception):
|
||||
"""
|
||||
Raised when trying to read from a stream that has been closed from the other end.
|
||||
"""
|
||||
|
||||
|
||||
class IncompleteRead(Exception):
|
||||
"""
|
||||
Raised during
|
||||
:meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or
|
||||
:meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
|
||||
connection is closed before the requested amount of bytes has been read.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__(
|
||||
"The stream was closed before the read operation could be completed"
|
||||
)
|
||||
|
||||
|
||||
class TypedAttributeLookupError(LookupError):
|
||||
"""
|
||||
Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute
|
||||
is not found and no default value has been given.
|
||||
"""
|
||||
|
||||
|
||||
class WouldBlock(Exception):
|
||||
"""Raised by ``X_nowait`` functions if ``X()`` would block."""
|
||||
|
||||
|
||||
class NoEventLoopError(RuntimeError):
|
||||
"""
|
||||
Raised by several functions that require an event loop to be running in the current
|
||||
thread when there is no running event loop.
|
||||
|
||||
This is also raised by :func:`.from_thread.run` and :func:`.from_thread.run_sync`
|
||||
if not calling from an AnyIO worker thread, and no ``token`` was passed.
|
||||
"""
|
||||
|
||||
|
||||
class RunFinishedError(RuntimeError):
|
||||
"""
|
||||
Raised by :func:`.from_thread.run` and :func:`.from_thread.run_sync` if the event
|
||||
loop associated with the explicitly passed token has already finished.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
super().__init__(
|
||||
"The event loop associated with the given token has already finished"
|
||||
)
|
||||
797
venv/lib/python3.11/site-packages/anyio/_core/_fileio.py
Normal file
797
venv/lib/python3.11/site-packages/anyio/_core/_fileio.py
Normal file
@@ -0,0 +1,797 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
from collections.abc import (
|
||||
AsyncIterator,
|
||||
Callable,
|
||||
Iterable,
|
||||
Iterator,
|
||||
Sequence,
|
||||
)
|
||||
from dataclasses import dataclass
|
||||
from functools import partial
|
||||
from os import PathLike
|
||||
from typing import (
|
||||
IO,
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
AnyStr,
|
||||
ClassVar,
|
||||
Final,
|
||||
Generic,
|
||||
overload,
|
||||
)
|
||||
|
||||
from .. import to_thread
|
||||
from ..abc import AsyncResource
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from types import ModuleType
|
||||
|
||||
from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer
|
||||
else:
|
||||
ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object
|
||||
|
||||
|
||||
class AsyncFile(AsyncResource, Generic[AnyStr]):
|
||||
"""
|
||||
An asynchronous file object.
|
||||
|
||||
This class wraps a standard file object and provides async friendly versions of the
|
||||
following blocking methods (where available on the original file object):
|
||||
|
||||
* read
|
||||
* read1
|
||||
* readline
|
||||
* readlines
|
||||
* readinto
|
||||
* readinto1
|
||||
* write
|
||||
* writelines
|
||||
* truncate
|
||||
* seek
|
||||
* tell
|
||||
* flush
|
||||
|
||||
All other methods are directly passed through.
|
||||
|
||||
This class supports the asynchronous context manager protocol which closes the
|
||||
underlying file at the end of the context block.
|
||||
|
||||
This class also supports asynchronous iteration::
|
||||
|
||||
async with await open_file(...) as f:
|
||||
async for line in f:
|
||||
print(line)
|
||||
"""
|
||||
|
||||
def __init__(self, fp: IO[AnyStr]) -> None:
|
||||
self._fp: Any = fp
|
||||
|
||||
def __getattr__(self, name: str) -> object:
|
||||
return getattr(self._fp, name)
|
||||
|
||||
@property
|
||||
def wrapped(self) -> IO[AnyStr]:
|
||||
"""The wrapped file object."""
|
||||
return self._fp
|
||||
|
||||
async def __aiter__(self) -> AsyncIterator[AnyStr]:
|
||||
while True:
|
||||
line = await self.readline()
|
||||
if line:
|
||||
yield line
|
||||
else:
|
||||
break
|
||||
|
||||
async def aclose(self) -> None:
|
||||
return await to_thread.run_sync(self._fp.close)
|
||||
|
||||
async def read(self, size: int = -1) -> AnyStr:
|
||||
return await to_thread.run_sync(self._fp.read, size)
|
||||
|
||||
async def read1(self: AsyncFile[bytes], size: int = -1) -> bytes:
|
||||
return await to_thread.run_sync(self._fp.read1, size)
|
||||
|
||||
async def readline(self) -> AnyStr:
|
||||
return await to_thread.run_sync(self._fp.readline)
|
||||
|
||||
async def readlines(self) -> list[AnyStr]:
|
||||
return await to_thread.run_sync(self._fp.readlines)
|
||||
|
||||
async def readinto(self: AsyncFile[bytes], b: WriteableBuffer) -> int:
|
||||
return await to_thread.run_sync(self._fp.readinto, b)
|
||||
|
||||
async def readinto1(self: AsyncFile[bytes], b: WriteableBuffer) -> int:
|
||||
return await to_thread.run_sync(self._fp.readinto1, b)
|
||||
|
||||
@overload
|
||||
async def write(self: AsyncFile[bytes], b: ReadableBuffer) -> int: ...
|
||||
|
||||
@overload
|
||||
async def write(self: AsyncFile[str], b: str) -> int: ...
|
||||
|
||||
async def write(self, b: ReadableBuffer | str) -> int:
|
||||
return await to_thread.run_sync(self._fp.write, b)
|
||||
|
||||
@overload
|
||||
async def writelines(
|
||||
self: AsyncFile[bytes], lines: Iterable[ReadableBuffer]
|
||||
) -> None: ...
|
||||
|
||||
@overload
|
||||
async def writelines(self: AsyncFile[str], lines: Iterable[str]) -> None: ...
|
||||
|
||||
async def writelines(self, lines: Iterable[ReadableBuffer] | Iterable[str]) -> None:
|
||||
return await to_thread.run_sync(self._fp.writelines, lines)
|
||||
|
||||
async def truncate(self, size: int | None = None) -> int:
    """Truncate the file to *size* bytes (or to the current position if ``None``) in a worker thread; returns the new size."""
    return await to_thread.run_sync(self._fp.truncate, size)
|
||||
|
||||
async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int:
    """Change the stream position in a worker thread; returns the new absolute position.

    NOTE(review): *whence* is annotated ``int | None`` but the default is
    ``os.SEEK_SET`` and passing ``None`` would be rejected by the underlying
    file's ``seek()`` — the annotation is presumably just ``int`` in intent.
    """
    return await to_thread.run_sync(self._fp.seek, offset, whence)
|
||||
|
||||
async def tell(self) -> int:
    """Return the current stream position, queried in a worker thread."""
    return await to_thread.run_sync(self._fp.tell)
|
||||
|
||||
async def flush(self) -> None:
    """Flush the wrapped file's write buffers in a worker thread."""
    return await to_thread.run_sync(self._fp.flush)
|
||||
|
||||
|
||||
@overload
async def open_file(
    file: str | PathLike[str] | int,
    mode: OpenBinaryMode,
    buffering: int = ...,
    encoding: str | None = ...,
    errors: str | None = ...,
    newline: str | None = ...,
    closefd: bool = ...,
    opener: Callable[[str, int], int] | None = ...,
) -> AsyncFile[bytes]: ...


@overload
async def open_file(
    file: str | PathLike[str] | int,
    mode: OpenTextMode = ...,
    buffering: int = ...,
    encoding: str | None = ...,
    errors: str | None = ...,
    newline: str | None = ...,
    closefd: bool = ...,
    opener: Callable[[str, int], int] | None = ...,
) -> AsyncFile[str]: ...


async def open_file(
    file: str | PathLike[str] | int,
    mode: str = "r",
    buffering: int = -1,
    encoding: str | None = None,
    errors: str | None = None,
    newline: str | None = None,
    closefd: bool = True,
    opener: Callable[[str, int], int] | None = None,
) -> AsyncFile[Any]:
    """
    Open a file asynchronously.

    The arguments are exactly the same as for the builtin :func:`open`.

    :return: an asynchronous file object

    """
    # Bind all arguments up front so the worker thread only has to invoke a
    # zero-argument callable; keyword binding matches open()'s signature exactly.
    opener_call = partial(
        open,
        file,
        mode,
        buffering=buffering,
        encoding=encoding,
        errors=errors,
        newline=newline,
        closefd=closefd,
        opener=opener,
    )
    raw_file = await to_thread.run_sync(opener_call)
    return AsyncFile(raw_file)
|
||||
|
||||
|
||||
def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]:
    """
    Wrap an existing file as an asynchronous file.

    :param file: an existing file-like object
    :return: an asynchronous file object

    """
    # Pure wrapping — no I/O happens here, so no worker thread is needed.
    return AsyncFile(file)
|
||||
|
||||
|
||||
@dataclass(eq=False)
class _PathIterator(AsyncIterator["Path"]):
    """Adapt a synchronous iterator of path-likes into an async iterator of Path objects."""

    iterator: Iterator[PathLike[str]]

    async def __anext__(self) -> Path:
        # Advance the underlying iterator one step in a worker thread, using
        # None as the exhaustion sentinel instead of catching StopIteration.
        value = await to_thread.run_sync(
            next, self.iterator, None, abandon_on_cancel=True
        )
        if value is None:
            raise StopAsyncIteration from None
        return Path(value)
|
||||
|
||||
|
||||
class Path:
    """
    An asynchronous version of :class:`pathlib.Path`.

    This class cannot be substituted for :class:`pathlib.Path` or
    :class:`pathlib.PurePath`, but it is compatible with the :class:`os.PathLike`
    interface.

    It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for
    the deprecated :meth:`~pathlib.Path.link_to` method.

    Some methods may be unavailable or have limited functionality, based on the Python
    version:

    * :meth:`~pathlib.Path.copy` (available on Python 3.14 or later)
    * :meth:`~pathlib.Path.copy_into` (available on Python 3.14 or later)
    * :meth:`~pathlib.Path.from_uri` (available on Python 3.13 or later)
    * :meth:`~pathlib.PurePath.full_match` (available on Python 3.13 or later)
    * :attr:`~pathlib.Path.info` (available on Python 3.14 or later)
    * :meth:`~pathlib.Path.is_junction` (available on Python 3.12 or later)
    * :meth:`~pathlib.PurePath.match` (the ``case_sensitive`` parameter is only
      available on Python 3.13 or later)
    * :meth:`~pathlib.Path.move` (available on Python 3.14 or later)
    * :meth:`~pathlib.Path.move_into` (available on Python 3.14 or later)
    * :meth:`~pathlib.PurePath.relative_to` (the ``walk_up`` parameter is only available
      on Python 3.12 or later)
    * :meth:`~pathlib.Path.walk` (available on Python 3.12 or later)

    Any methods that do disk I/O need to be awaited on. These methods are:

    * :meth:`~pathlib.Path.absolute`
    * :meth:`~pathlib.Path.chmod`
    * :meth:`~pathlib.Path.cwd`
    * :meth:`~pathlib.Path.exists`
    * :meth:`~pathlib.Path.expanduser`
    * :meth:`~pathlib.Path.group`
    * :meth:`~pathlib.Path.hardlink_to`
    * :meth:`~pathlib.Path.home`
    * :meth:`~pathlib.Path.is_block_device`
    * :meth:`~pathlib.Path.is_char_device`
    * :meth:`~pathlib.Path.is_dir`
    * :meth:`~pathlib.Path.is_fifo`
    * :meth:`~pathlib.Path.is_file`
    * :meth:`~pathlib.Path.is_junction`
    * :meth:`~pathlib.Path.is_mount`
    * :meth:`~pathlib.Path.is_socket`
    * :meth:`~pathlib.Path.is_symlink`
    * :meth:`~pathlib.Path.lchmod`
    * :meth:`~pathlib.Path.lstat`
    * :meth:`~pathlib.Path.mkdir`
    * :meth:`~pathlib.Path.open`
    * :meth:`~pathlib.Path.owner`
    * :meth:`~pathlib.Path.read_bytes`
    * :meth:`~pathlib.Path.read_text`
    * :meth:`~pathlib.Path.readlink`
    * :meth:`~pathlib.Path.rename`
    * :meth:`~pathlib.Path.replace`
    * :meth:`~pathlib.Path.resolve`
    * :meth:`~pathlib.Path.rmdir`
    * :meth:`~pathlib.Path.samefile`
    * :meth:`~pathlib.Path.stat`
    * :meth:`~pathlib.Path.symlink_to`
    * :meth:`~pathlib.Path.touch`
    * :meth:`~pathlib.Path.unlink`
    * :meth:`~pathlib.Path.walk`
    * :meth:`~pathlib.Path.write_bytes`
    * :meth:`~pathlib.Path.write_text`

    Additionally, the following methods return an async iterator yielding
    :class:`~.Path` objects:

    * :meth:`~pathlib.Path.glob`
    * :meth:`~pathlib.Path.iterdir`
    * :meth:`~pathlib.Path.rglob`
    """

    # __weakref__ must be listed explicitly: slotted classes do not support
    # weak references unless the slot is declared.
    __slots__ = "_path", "__weakref__"

    __weakref__: Any

    def __init__(self, *args: str | PathLike[str]) -> None:
        # All path logic is delegated to a wrapped (immutable) pathlib.Path.
        self._path: Final[pathlib.Path] = pathlib.Path(*args)

    def __fspath__(self) -> str:
        return self._path.__fspath__()

    def __str__(self) -> str:
        return self._path.__str__()

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.as_posix()!r})"

    def __bytes__(self) -> bytes:
        return self._path.__bytes__()

    def __hash__(self) -> int:
        return self._path.__hash__()

    # The comparison dunders below accept both anyio Paths and plain pathlib
    # paths by unwrapping anyio Paths to their underlying pathlib.Path first.
    def __eq__(self, other: object) -> bool:
        target = other._path if isinstance(other, Path) else other
        return self._path.__eq__(target)

    def __lt__(self, other: pathlib.PurePath | Path) -> bool:
        target = other._path if isinstance(other, Path) else other
        return self._path.__lt__(target)

    def __le__(self, other: pathlib.PurePath | Path) -> bool:
        target = other._path if isinstance(other, Path) else other
        return self._path.__le__(target)

    def __gt__(self, other: pathlib.PurePath | Path) -> bool:
        target = other._path if isinstance(other, Path) else other
        return self._path.__gt__(target)

    def __ge__(self, other: pathlib.PurePath | Path) -> bool:
        target = other._path if isinstance(other, Path) else other
        return self._path.__ge__(target)

    def __truediv__(self, other: str | PathLike[str]) -> Path:
        return Path(self._path / other)

    def __rtruediv__(self, other: str | PathLike[str]) -> Path:
        return Path(other) / self

    # Pure (non-I/O) properties: straight delegation to pathlib.
    @property
    def parts(self) -> tuple[str, ...]:
        return self._path.parts

    @property
    def drive(self) -> str:
        return self._path.drive

    @property
    def root(self) -> str:
        return self._path.root

    @property
    def anchor(self) -> str:
        return self._path.anchor

    @property
    def parents(self) -> Sequence[Path]:
        # Re-wrap each ancestor so callers get anyio Paths, not pathlib Paths.
        return tuple(Path(p) for p in self._path.parents)

    @property
    def parent(self) -> Path:
        return Path(self._path.parent)

    @property
    def name(self) -> str:
        return self._path.name

    @property
    def suffix(self) -> str:
        return self._path.suffix

    @property
    def suffixes(self) -> list[str]:
        return self._path.suffixes

    @property
    def stem(self) -> str:
        return self._path.stem

    async def absolute(self) -> Path:
        path = await to_thread.run_sync(self._path.absolute)
        return Path(path)

    def as_posix(self) -> str:
        return self._path.as_posix()

    def as_uri(self) -> str:
        return self._path.as_uri()

    # Methods introduced or extended in Python 3.13.
    if sys.version_info >= (3, 13):
        parser: ClassVar[ModuleType] = pathlib.Path.parser

        @classmethod
        def from_uri(cls, uri: str) -> Path:
            return Path(pathlib.Path.from_uri(uri))

        def full_match(
            self, path_pattern: str, *, case_sensitive: bool | None = None
        ) -> bool:
            return self._path.full_match(path_pattern, case_sensitive=case_sensitive)

        def match(
            self, path_pattern: str, *, case_sensitive: bool | None = None
        ) -> bool:
            return self._path.match(path_pattern, case_sensitive=case_sensitive)
    else:

        def match(self, path_pattern: str) -> bool:
            return self._path.match(path_pattern)

    # Methods introduced in Python 3.14.
    if sys.version_info >= (3, 14):

        @property
        def info(self) -> Any:  # TODO: add return type annotation when Typeshed gets it
            return self._path.info

        async def copy(
            self,
            target: str | os.PathLike[str],
            *,
            follow_symlinks: bool = True,
            preserve_metadata: bool = False,
        ) -> Path:
            # partial() binds the keyword-only arguments so run_sync only has
            # to pass the positional target.
            func = partial(
                self._path.copy,
                follow_symlinks=follow_symlinks,
                preserve_metadata=preserve_metadata,
            )
            return Path(await to_thread.run_sync(func, pathlib.Path(target)))

        async def copy_into(
            self,
            target_dir: str | os.PathLike[str],
            *,
            follow_symlinks: bool = True,
            preserve_metadata: bool = False,
        ) -> Path:
            func = partial(
                self._path.copy_into,
                follow_symlinks=follow_symlinks,
                preserve_metadata=preserve_metadata,
            )
            return Path(await to_thread.run_sync(func, pathlib.Path(target_dir)))

        async def move(self, target: str | os.PathLike[str]) -> Path:
            # Upstream does not handle anyio.Path properly as a PathLike
            target = pathlib.Path(target)
            return Path(await to_thread.run_sync(self._path.move, target))

        async def move_into(
            self,
            target_dir: str | os.PathLike[str],
        ) -> Path:
            return Path(await to_thread.run_sync(self._path.move_into, target_dir))

    def is_relative_to(self, other: str | PathLike[str]) -> bool:
        # Emulated via relative_to(): a ValueError means "not relative".
        try:
            self.relative_to(other)
            return True
        except ValueError:
            return False

    async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None:
        func = partial(os.chmod, follow_symlinks=follow_symlinks)
        return await to_thread.run_sync(func, self._path, mode)

    @classmethod
    async def cwd(cls) -> Path:
        path = await to_thread.run_sync(pathlib.Path.cwd)
        return cls(path)

    async def exists(self) -> bool:
        # abandon_on_cancel: presumably lets cancellation proceed without
        # waiting for the worker thread — see to_thread.run_sync for details.
        return await to_thread.run_sync(self._path.exists, abandon_on_cancel=True)

    async def expanduser(self) -> Path:
        return Path(
            await to_thread.run_sync(self._path.expanduser, abandon_on_cancel=True)
        )

    # glob() signature varies with the Python version; each branch delegates
    # to pathlib and wraps the resulting generator in an async iterator.
    if sys.version_info < (3, 12):
        # Python 3.11 and earlier
        def glob(self, pattern: str) -> AsyncIterator[Path]:
            gen = self._path.glob(pattern)
            return _PathIterator(gen)
    elif (3, 12) <= sys.version_info < (3, 13):
        # changed in Python 3.12:
        # - The case_sensitive parameter was added.
        def glob(
            self,
            pattern: str,
            *,
            case_sensitive: bool | None = None,
        ) -> AsyncIterator[Path]:
            gen = self._path.glob(pattern, case_sensitive=case_sensitive)
            return _PathIterator(gen)
    elif sys.version_info >= (3, 13):
        # Changed in Python 3.13:
        # - The recurse_symlinks parameter was added.
        # - The pattern parameter accepts a path-like object.
        def glob(  # type: ignore[misc] # mypy doesn't allow for differing signatures in a conditional block
            self,
            pattern: str | PathLike[str],
            *,
            case_sensitive: bool | None = None,
            recurse_symlinks: bool = False,
        ) -> AsyncIterator[Path]:
            gen = self._path.glob(
                pattern,  # type: ignore[arg-type]
                case_sensitive=case_sensitive,
                recurse_symlinks=recurse_symlinks,
            )
            return _PathIterator(gen)

    async def group(self) -> str:
        return await to_thread.run_sync(self._path.group, abandon_on_cancel=True)

    async def hardlink_to(
        self, target: str | bytes | PathLike[str] | PathLike[bytes]
    ) -> None:
        # Unwrap anyio Paths before handing the target to os.link.
        if isinstance(target, Path):
            target = target._path

        await to_thread.run_sync(os.link, target, self)

    @classmethod
    async def home(cls) -> Path:
        home_path = await to_thread.run_sync(pathlib.Path.home)
        return cls(home_path)

    def is_absolute(self) -> bool:
        return self._path.is_absolute()

    async def is_block_device(self) -> bool:
        return await to_thread.run_sync(
            self._path.is_block_device, abandon_on_cancel=True
        )

    async def is_char_device(self) -> bool:
        return await to_thread.run_sync(
            self._path.is_char_device, abandon_on_cancel=True
        )

    async def is_dir(self) -> bool:
        return await to_thread.run_sync(self._path.is_dir, abandon_on_cancel=True)

    async def is_fifo(self) -> bool:
        return await to_thread.run_sync(self._path.is_fifo, abandon_on_cancel=True)

    async def is_file(self) -> bool:
        return await to_thread.run_sync(self._path.is_file, abandon_on_cancel=True)

    if sys.version_info >= (3, 12):

        async def is_junction(self) -> bool:
            return await to_thread.run_sync(self._path.is_junction)

    async def is_mount(self) -> bool:
        return await to_thread.run_sync(
            os.path.ismount, self._path, abandon_on_cancel=True
        )

    def is_reserved(self) -> bool:
        return self._path.is_reserved()

    async def is_socket(self) -> bool:
        return await to_thread.run_sync(self._path.is_socket, abandon_on_cancel=True)

    async def is_symlink(self) -> bool:
        return await to_thread.run_sync(self._path.is_symlink, abandon_on_cancel=True)

    async def iterdir(self) -> AsyncIterator[Path]:
        # On Python 3.13+ pathlib's iterdir() scans the directory eagerly, so
        # it is invoked in a worker thread; earlier versions are lazy.
        gen = (
            self._path.iterdir()
            if sys.version_info < (3, 13)
            else await to_thread.run_sync(self._path.iterdir, abandon_on_cancel=True)
        )
        async for path in _PathIterator(gen):
            yield path

    def joinpath(self, *args: str | PathLike[str]) -> Path:
        return Path(self._path.joinpath(*args))

    async def lchmod(self, mode: int) -> None:
        await to_thread.run_sync(self._path.lchmod, mode)

    async def lstat(self) -> os.stat_result:
        return await to_thread.run_sync(self._path.lstat, abandon_on_cancel=True)

    async def mkdir(
        self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False
    ) -> None:
        await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok)

    @overload
    async def open(
        self,
        mode: OpenBinaryMode,
        buffering: int = ...,
        encoding: str | None = ...,
        errors: str | None = ...,
        newline: str | None = ...,
    ) -> AsyncFile[bytes]: ...

    @overload
    async def open(
        self,
        mode: OpenTextMode = ...,
        buffering: int = ...,
        encoding: str | None = ...,
        errors: str | None = ...,
        newline: str | None = ...,
    ) -> AsyncFile[str]: ...

    async def open(
        self,
        mode: str = "r",
        buffering: int = -1,
        encoding: str | None = None,
        errors: str | None = None,
        newline: str | None = None,
    ) -> AsyncFile[Any]:
        """Open the file pointed to by this path in a worker thread and wrap it as an :class:`AsyncFile`."""
        fp = await to_thread.run_sync(
            self._path.open, mode, buffering, encoding, errors, newline
        )
        return AsyncFile(fp)

    async def owner(self) -> str:
        return await to_thread.run_sync(self._path.owner, abandon_on_cancel=True)

    async def read_bytes(self) -> bytes:
        return await to_thread.run_sync(self._path.read_bytes)

    async def read_text(
        self, encoding: str | None = None, errors: str | None = None
    ) -> str:
        return await to_thread.run_sync(self._path.read_text, encoding, errors)

    if sys.version_info >= (3, 12):

        def relative_to(
            self, *other: str | PathLike[str], walk_up: bool = False
        ) -> Path:
            # relative_to() should work with any PathLike but it doesn't
            others = [pathlib.Path(other) for other in other]
            return Path(self._path.relative_to(*others, walk_up=walk_up))

    else:

        def relative_to(self, *other: str | PathLike[str]) -> Path:
            return Path(self._path.relative_to(*other))

    async def readlink(self) -> Path:
        target = await to_thread.run_sync(os.readlink, self._path)
        return Path(target)

    async def rename(self, target: str | pathlib.PurePath | Path) -> Path:
        if isinstance(target, Path):
            target = target._path

        await to_thread.run_sync(self._path.rename, target)
        return Path(target)

    async def replace(self, target: str | pathlib.PurePath | Path) -> Path:
        if isinstance(target, Path):
            target = target._path

        await to_thread.run_sync(self._path.replace, target)
        return Path(target)

    async def resolve(self, strict: bool = False) -> Path:
        func = partial(self._path.resolve, strict=strict)
        return Path(await to_thread.run_sync(func, abandon_on_cancel=True))

    # rglob() mirrors the version-conditional structure of glob() above.
    if sys.version_info < (3, 12):
        # Pre Python 3.12
        def rglob(self, pattern: str) -> AsyncIterator[Path]:
            gen = self._path.rglob(pattern)
            return _PathIterator(gen)
    elif (3, 12) <= sys.version_info < (3, 13):
        # Changed in Python 3.12:
        # - The case_sensitive parameter was added.
        def rglob(
            self, pattern: str, *, case_sensitive: bool | None = None
        ) -> AsyncIterator[Path]:
            gen = self._path.rglob(pattern, case_sensitive=case_sensitive)
            return _PathIterator(gen)
    elif sys.version_info >= (3, 13):
        # Changed in Python 3.13:
        # - The recurse_symlinks parameter was added.
        # - The pattern parameter accepts a path-like object.
        def rglob(  # type: ignore[misc] # mypy doesn't allow for differing signatures in a conditional block
            self,
            pattern: str | PathLike[str],
            *,
            case_sensitive: bool | None = None,
            recurse_symlinks: bool = False,
        ) -> AsyncIterator[Path]:
            gen = self._path.rglob(
                pattern,  # type: ignore[arg-type]
                case_sensitive=case_sensitive,
                recurse_symlinks=recurse_symlinks,
            )
            return _PathIterator(gen)

    async def rmdir(self) -> None:
        await to_thread.run_sync(self._path.rmdir)

    async def samefile(self, other_path: str | PathLike[str]) -> bool:
        if isinstance(other_path, Path):
            other_path = other_path._path

        return await to_thread.run_sync(
            self._path.samefile, other_path, abandon_on_cancel=True
        )

    async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result:
        func = partial(os.stat, follow_symlinks=follow_symlinks)
        return await to_thread.run_sync(func, self._path, abandon_on_cancel=True)

    async def symlink_to(
        self,
        target: str | bytes | PathLike[str] | PathLike[bytes],
        target_is_directory: bool = False,
    ) -> None:
        if isinstance(target, Path):
            target = target._path

        await to_thread.run_sync(self._path.symlink_to, target, target_is_directory)

    async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None:
        await to_thread.run_sync(self._path.touch, mode, exist_ok)

    async def unlink(self, missing_ok: bool = False) -> None:
        # missing_ok is emulated here rather than passed through to pathlib.
        try:
            await to_thread.run_sync(self._path.unlink)
        except FileNotFoundError:
            if not missing_ok:
                raise

    if sys.version_info >= (3, 12):

        async def walk(
            self,
            top_down: bool = True,
            on_error: Callable[[OSError], object] | None = None,
            follow_symlinks: bool = False,
        ) -> AsyncIterator[tuple[Path, list[str], list[str]]]:
            # Drive pathlib's synchronous walk generator one step at a time
            # in a worker thread, using None as the exhaustion sentinel.
            def get_next_value() -> tuple[pathlib.Path, list[str], list[str]] | None:
                try:
                    return next(gen)
                except StopIteration:
                    return None

            gen = self._path.walk(top_down, on_error, follow_symlinks)
            while True:
                value = await to_thread.run_sync(get_next_value)
                if value is None:
                    return

                root, dirs, paths = value
                yield Path(root), dirs, paths

    def with_name(self, name: str) -> Path:
        return Path(self._path.with_name(name))

    def with_stem(self, stem: str) -> Path:
        # Implemented via with_name() so it also works before Python 3.9.
        return Path(self._path.with_name(stem + self._path.suffix))

    def with_suffix(self, suffix: str) -> Path:
        return Path(self._path.with_suffix(suffix))

    def with_segments(self, *pathsegments: str | PathLike[str]) -> Path:
        return Path(*pathsegments)

    async def write_bytes(self, data: bytes) -> int:
        return await to_thread.run_sync(self._path.write_bytes, data)

    async def write_text(
        self,
        data: str,
        encoding: str | None = None,
        errors: str | None = None,
        newline: str | None = None,
    ) -> int:
        # Path.write_text() does not support the "newline" parameter before Python 3.10
        def sync_write_text() -> int:
            with self._path.open(
                "w", encoding=encoding, errors=errors, newline=newline
            ) as fp:
                return fp.write(data)

        return await to_thread.run_sync(sync_write_text)
|
||||
|
||||
|
||||
# Register Path as a virtual subclass of os.PathLike so that
# isinstance(p, os.PathLike) checks accept anyio Paths.
PathLike.register(Path)
|
||||
18
venv/lib/python3.11/site-packages/anyio/_core/_resources.py
Normal file
18
venv/lib/python3.11/site-packages/anyio/_core/_resources.py
Normal file
@@ -0,0 +1,18 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from ..abc import AsyncResource
|
||||
from ._tasks import CancelScope
|
||||
|
||||
|
||||
async def aclose_forcefully(resource: AsyncResource) -> None:
    """
    Close an asynchronous resource in a cancelled scope.

    Doing this closes the resource without waiting on anything.

    :param resource: the resource to close

    """
    # Cancelling the scope before awaiting aclose() means the close operation
    # runs with cancellation already delivered, so it cannot block.
    cancel_scope = CancelScope()
    with cancel_scope:
        cancel_scope.cancel()
        await resource.aclose()
|
||||
29
venv/lib/python3.11/site-packages/anyio/_core/_signals.py
Normal file
29
venv/lib/python3.11/site-packages/anyio/_core/_signals.py
Normal file
@@ -0,0 +1,29 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import AsyncIterator
|
||||
from contextlib import AbstractContextManager
|
||||
from signal import Signals
|
||||
|
||||
from ._eventloop import get_async_backend
|
||||
|
||||
|
||||
def open_signal_receiver(
    *signals: Signals,
) -> AbstractContextManager[AsyncIterator[Signals]]:
    """
    Start receiving operating system signals.

    :param signals: signals to receive (e.g. ``signal.SIGINT``)
    :return: an asynchronous context manager for an asynchronous iterator which yields
        signal numbers
    :raises NoEventLoopError: if no supported asynchronous event loop is running in the
        current thread

    .. warning:: Windows does not support signals natively so it is best to avoid
        relying on this in cross-platform applications.

    .. warning:: On asyncio, this permanently replaces any previous signal handler for
        the given signals, as set via :meth:`~asyncio.loop.add_signal_handler`.

    """
    # Delegate entirely to the currently running event loop backend.
    return get_async_backend().open_signal_receiver(*signals)
|
||||
1003
venv/lib/python3.11/site-packages/anyio/_core/_sockets.py
Normal file
1003
venv/lib/python3.11/site-packages/anyio/_core/_sockets.py
Normal file
File diff suppressed because it is too large
Load Diff
52
venv/lib/python3.11/site-packages/anyio/_core/_streams.py
Normal file
52
venv/lib/python3.11/site-packages/anyio/_core/_streams.py
Normal file
@@ -0,0 +1,52 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import math
|
||||
from typing import TypeVar
|
||||
from warnings import warn
|
||||
|
||||
from ..streams.memory import (
|
||||
MemoryObjectReceiveStream,
|
||||
MemoryObjectSendStream,
|
||||
_MemoryObjectStreamState,
|
||||
)
|
||||
|
||||
T_Item = TypeVar("T_Item")
|
||||
|
||||
|
||||
class create_memory_object_stream(
    tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]],
):
    """
    Create a memory object stream.

    The stream's item type can be annotated like
    :func:`create_memory_object_stream[T_Item]`.

    :param max_buffer_size: number of items held in the buffer until ``send()`` starts
        blocking
    :param item_type: old way of marking the streams with the right generic type for
        static typing (does nothing on AnyIO 4)

    .. deprecated:: 4.0
        Use ``create_memory_object_stream[YourItemType](...)`` instead.
    :return: a tuple of (send stream, receive stream)

    """

    # Implemented as a class (not a function) so that it can be subscripted
    # like a generic — create_memory_object_stream[T](...) — while __new__
    # returns a plain tuple of the two stream halves.
    def __new__(  # type: ignore[misc]
        cls, max_buffer_size: float = 0, item_type: object = None
    ) -> tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]:
        # Only integers or math.inf are accepted as buffer sizes.
        # NOTE(review): isinstance(..., int) also accepts bool, since bool is
        # an int subclass — presumably harmless.
        if max_buffer_size != math.inf and not isinstance(max_buffer_size, int):
            raise ValueError("max_buffer_size must be either an integer or math.inf")
        if max_buffer_size < 0:
            raise ValueError("max_buffer_size cannot be negative")
        if item_type is not None:
            warn(
                "The item_type argument has been deprecated in AnyIO 4.0. "
                "Use create_memory_object_stream[YourItemType](...) instead.",
                DeprecationWarning,
                stacklevel=2,
            )

        # Both halves share a single state object holding the buffer.
        state = _MemoryObjectStreamState[T_Item](max_buffer_size)
        return (MemoryObjectSendStream(state), MemoryObjectReceiveStream(state))
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user