Commit 885f6a7f authored by xianyang

Merge branch 'develop/xp' into development

# Conflicts:
#	models/recharge.py
parents 0f04d721 101d1370


@@ -6,7 +6,7 @@
<excludeFolder url="file://$MODULE_DIR$/fs-env" />
<excludeFolder url="file://$MODULE_DIR$/venv" />
</content>
- <orderEntry type="inheritedJdk" />
+ <orderEntry type="jdk" jdkName="Python 3.8 (financial_system)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="PyDocumentationSettings">
......
@@ -3,5 +3,5 @@
<component name="JavaScriptSettings">
<option name="languageLevel" value="ES6" />
</component>
- <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (financial-system)" project-jdk-type="Python SDK" />
+ <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (financial_system)" project-jdk-type="Python SDK" />
</project>
\ No newline at end of file
@@ -2,6 +2,8 @@ import math
import threading
from sqlalchemy import and_, func
from sqlalchemy.orm import Session
from libs.orm import QueryAllData
from models.recharge import Settlement
locka = threading.Lock()
@@ -67,3 +69,51 @@ class GuildSet(object):
        if (data.status or data.status == 0) and data.status < 2:
            db.query(Settlement).filter(Settlement.id == data.id).update({Settlement.status: data.status + 1})
            db.commit()
def paymentset_guild_data(db: Session, dbname, params):
    """Query payment records, applying only the filters present in params."""
    paymentset_filters = []
    if params.get("guild_id"):
        paymentset_filters.append(dbname.guild_id == params.get("guild_id"))
    if params.get("cont"):
        paymentset_filters.append(dbname.cont == params.get("cont"))
    if params.get("money"):
        paymentset_filters.append(dbname.money == params.get("money"))
    if params.get("url"):
        paymentset_filters.append(dbname.url == params.get("url"))
    if params.get("uuid"):
        paymentset_filters.append(dbname.uuid == params.get("uuid"))
    if params.get("start_time"):
        paymentset_filters.append(dbname.create_time >= params.get("start_time"))
    if params.get("end_time"):
        paymentset_filters.append(dbname.create_time <= params.get("end_time"))
    querydata, count = QueryAllData(db, dbname, params, paymentset_filters).query_data()
    data = [QueryAllData.serialization(item) for item in querydata]
    return data, count
def outon_account_data(db: Session, dbname, params):
    """Return account records with internal bookkeeping fields stripped."""
    querydata, count = QueryAllData(db, dbname, params, None).query_data()
    data = [QueryAllData.serialization(item, remove={'operator', 'unique_tag', 'create_time', 'beneficiary', 'description', 'config_key', 'income', 'output', 'operator_id'}) for item in querydata]
    return data, count
def accout_list_data(db: Session, dbname, params):
    """Query transfer records, applying only the filters present in params."""
    accout_filters = []
    if params.get("guild_id"):
        accout_filters.append(dbname.guild_id == params.get("guild_id"))
    if params.get("cont"):
        accout_filters.append(dbname.cont == params.get("cont"))
    if params.get("money"):
        accout_filters.append(dbname.money == params.get("money"))
    if params.get("url"):
        accout_filters.append(dbname.url == params.get("url"))
    if params.get("uuid"):
        accout_filters.append(dbname.uuid == params.get("uuid"))
    if params.get("start_time"):
        accout_filters.append(dbname.create_time >= params.get("start_time"))
    if params.get("end_time"):
        accout_filters.append(dbname.create_time <= params.get("end_time"))
    querydata, count = QueryAllData(db, dbname, params, accout_filters).query_data()
    data = [QueryAllData.serialization(item) for item in querydata]
    return data, count
from typing import Optional
- from pydantic import BaseModel
+ from pydantic import BaseModel, validator
class StatementBase(BaseModel):
@@ -44,12 +44,59 @@ class UserWithdrawalList(BaseModel):
class GuildWithdrawalList(BaseModel):
-     page: Optional[int] = None
-     size: Optional[int] = None
-     start_time: Optional[str] = ""
-     end_time: Optional[str] = ""
-     status: Optional[int] = None
-     guild_id: Optional[int] = None
+     page: int
+     size: int
+     start_time: str = None
+     end_time: str = None
+     status: int = None
+     guild_id: int = None
class PaymentWithdrawalList(BaseModel):
    page: int = None
    size: int = None
    start_time: str = None
    end_time: str = None
    status: int = None
    guild_id: int = None

    @validator('start_time')
    def validate_start_time(cls, val):
        # An empty string is treated as "not provided" and becomes None.
        if val != '':
            return val

    @validator('end_time')
    def validate_end_time(cls, val):
        if val != '':
            return val

    @validator('status')
    def validate_status(cls, val):
        if val != '':
            return val

    @validator('guild_id')
    def validate_guild_id(cls, val):
        if val != '':
            return val

class PaymentAdd(BaseModel):
    uuid: str
    money: int
    cont: str
    url: str

    def __str__(self):
        return "uuid:%s, money:%s" % (str(self.uuid), self.money)

class PaymentAccountlList(PaymentWithdrawalList):
    uuid: int = None
class GuildUpdate(BaseModel):
......
from datetime import datetime
from app.api.statement import crud, schemas
from app.api.account import schemas as acc_schemas
from app import get_db
- from fastapi import Depends, APIRouter
+ from fastapi import Depends, APIRouter, File
from sqlalchemy.orm import Session
from app.api.statement.crud import RechargeStatement, WithdrawStatement
- from app.api.statement.guild import GuildSet
+ from app.api.statement.guild import GuildSet, paymentset_guild_data, outon_account_data, accout_list_data
from app.api.statement.schemas import PaymentWithdrawalList, PaymentAdd, PaymentAccountlList
from libs.img_code import new_upload_file, random_number
from libs.result_format import HttpResultResponse
from models.recharge import Paymentlog, Fiaccount
router = APIRouter()
@@ -45,6 +49,52 @@ def guild_settlement_list(data: schemas.GuildWithdrawalList, db: Session = Depen
    return HttpResultResponse(total=total, data=guild_list)
@router.post("/guild/payment")
def guild_payment_list(data:PaymentWithdrawalList,db: Session = Depends(get_db)):
"""打款记录"""
payment_list, total = paymentset_guild_data(db,Paymentlog,data.dict(exclude_none=True))
return HttpResultResponse(total=total, data=payment_list)
@router.post('/upload')
async def uploadFile(a_file: bytes = File(...)):
a_content = a_file
image_name=random_number()
cos_path =new_upload_file(a_content,image_name)
return HttpResultResponse(data=cos_path)
@router.post("/payment/add")
def guild_payment_list(data:PaymentAdd,db: Session = Depends(get_db)):
"""提交打款"""
dataPayment=Paymentlog(uuid=data.uuid,money=data.money,cont=data.cont,url=data.url,create_time=datetime.now())
db.add(dataPayment)
db.commit()
db.refresh(dataPayment)
db.close()
return HttpResultResponse(data='')
@router.get("/onaccount")
def outon_account(db: Session = Depends(get_db)):
"""转出和转入账户人员列表"""
account_list, total = outon_account_data(db, Fiaccount,{})
return HttpResultResponse(total=total, data=account_list)
@router.post("/transferlist")
def outon_account(data:PaymentAccountlList,db: Session = Depends(get_db)):
"""转账列表"""
account_list, total = accout_list_data(db, Fiaccount,data.dict(exclude_none=True))
return HttpResultResponse(total=total, data=account_list)
@router.post("/guild/update") @router.post("/guild/update")
def guild_update_list(data: schemas.GuildUpdate, db: Session = Depends(get_db)): def guild_update_list(data: schemas.GuildUpdate, db: Session = Depends(get_db)):
"""公会结算""" """公会结算"""
......
@@ -56,8 +56,10 @@ def authenticate_user(db: Session, form_data: UserLoginForm):
    if md5_password != user_data.hashed_password:
        return {"result": False, "msg": "密码错误"}
    # Verification code check
-     if form_data.verify.lower() != session.headers.get("verify").lower():
-         return {"result": False, "msg": "验证码错误"}
+     print(form_data.verify)
+     print(session.headers)
    # if form_data.verify.lower() != session.headers.get("verify").lower():
    #     return {"result": False, "msg": "验证码错误"}
    if user_data.google_key:
        return {"result": True, "msg": "验证通过", "google_key": "1"}
    else:
......
@@ -3,9 +3,32 @@
To switch environments, edit the command at the end of this file: env = ...
"""
import os
from pydantic import BaseSettings
from qcloud_cos import CosConfig
from qcloud_cos import CosS3Client

SITE_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
FILE_PATH = os.path.join(SITE_ROOT, "runtime", "images")

# Tencent Cloud COS client configuration
region = 'ap-guangzhou'
token = None
scheme = 'https'
# Bucket = "3yakj-center-1256890024"  # production bucket
Bucket = "fj-dc-test-1256890024"  # test bucket
# Bucket = "3yakj"  # test bucket
# secret_id = 'LTAI5t6JyitXaGoGdU7mga7o'
# secret_key = 'Qg69gRB0vTThX5Ur6tlqgbmvqRaUjE'
secret_id = 'AKIDra5Ur292g4FCzYrwmMhAOQFsHSP9wb3S'
secret_key = 'JRKyzpSr1wc5OXXUFsGPKtKfsvqcEcqw'
config = CosConfig(Region=region, SecretId=secret_id, SecretKey=secret_key, Token=token, Scheme=scheme)
client = CosS3Client(config)
COS_PATH = 'https://fj-dc-test-1256890024.cos.ap-guangzhou.myqcloud.com'  # test environment
COS_RERURN_PATH = '/images/'
class Env(BaseSettings):
    DEBUG: bool = True
@@ -25,6 +48,7 @@ class Env(BaseSettings):
    PASSWORD: str = "fj123456"
class TestingEnv(Env):
    """Testing environment configuration."""
    TESTING: bool = True
......
/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
/* Greenlet object interface */
#ifndef Py_GREENLETOBJECT_H
#define Py_GREENLETOBJECT_H
#include <Python.h>
#ifdef __cplusplus
extern "C" {
#endif
/* This is deprecated and undocumented. It does not change. */
#define GREENLET_VERSION "1.0.0"
#ifndef GREENLET_MODULE
#define implementation_ptr_t void*
#endif
typedef struct _greenlet {
PyObject_HEAD
PyObject* weakreflist;
PyObject* dict;
implementation_ptr_t pimpl;
} PyGreenlet;
#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type))
/* C API functions */
/* Total number of symbols that are exported */
#define PyGreenlet_API_pointers 12
#define PyGreenlet_Type_NUM 0
#define PyExc_GreenletError_NUM 1
#define PyExc_GreenletExit_NUM 2
#define PyGreenlet_New_NUM 3
#define PyGreenlet_GetCurrent_NUM 4
#define PyGreenlet_Throw_NUM 5
#define PyGreenlet_Switch_NUM 6
#define PyGreenlet_SetParent_NUM 7
#define PyGreenlet_MAIN_NUM 8
#define PyGreenlet_STARTED_NUM 9
#define PyGreenlet_ACTIVE_NUM 10
#define PyGreenlet_GET_PARENT_NUM 11
#ifndef GREENLET_MODULE
/* This section is used by modules that uses the greenlet C API */
static void** _PyGreenlet_API = NULL;
# define PyGreenlet_Type \
(*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM])
# define PyExc_GreenletError \
((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM])
# define PyExc_GreenletExit \
((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM])
/*
* PyGreenlet_New(PyObject *args)
*
* greenlet.greenlet(run, parent=None)
*/
# define PyGreenlet_New \
(*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \
_PyGreenlet_API[PyGreenlet_New_NUM])
/*
* PyGreenlet_GetCurrent(void)
*
* greenlet.getcurrent()
*/
# define PyGreenlet_GetCurrent \
(*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM])
/*
* PyGreenlet_Throw(
* PyGreenlet *greenlet,
* PyObject *typ,
* PyObject *val,
* PyObject *tb)
*
* g.throw(...)
*/
# define PyGreenlet_Throw \
(*(PyObject * (*)(PyGreenlet * self, \
PyObject * typ, \
PyObject * val, \
PyObject * tb)) \
_PyGreenlet_API[PyGreenlet_Throw_NUM])
/*
* PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args)
*
* g.switch(*args, **kwargs)
*/
# define PyGreenlet_Switch \
(*(PyObject * \
(*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \
_PyGreenlet_API[PyGreenlet_Switch_NUM])
/*
* PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent)
*
* g.parent = new_parent
*/
# define PyGreenlet_SetParent \
(*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \
_PyGreenlet_API[PyGreenlet_SetParent_NUM])
/*
* PyGreenlet_GetParent(PyObject* greenlet)
*
* return greenlet.parent;
*
* This could return NULL even if there is no exception active.
* If it does not return NULL, you are responsible for decrementing the
* reference count.
*/
# define PyGreenlet_GetParent \
(*(PyGreenlet* (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_GET_PARENT_NUM])
/*
* deprecated, undocumented alias.
*/
# define PyGreenlet_GET_PARENT PyGreenlet_GetParent
# define PyGreenlet_MAIN \
(*(int (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_MAIN_NUM])
# define PyGreenlet_STARTED \
(*(int (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_STARTED_NUM])
# define PyGreenlet_ACTIVE \
(*(int (*)(PyGreenlet*)) \
_PyGreenlet_API[PyGreenlet_ACTIVE_NUM])
/* Macro that imports greenlet and initializes C API */
/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we
keep the older definition to be sure older code that might have a copy of
the header still works. */
# define PyGreenlet_Import() \
{ \
_PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \
}
#endif /* GREENLET_MODULE */
#ifdef __cplusplus
}
#endif
#endif /* !Py_GREENLETOBJECT_H */
Copyright (c) 2010, 2013 PyMySQL contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
Metadata-Version: 2.1
Name: PyMySQL
Version: 1.0.2
Summary: Pure Python MySQL Driver
Home-page: https://github.com/PyMySQL/PyMySQL/
Author: yutaka.matsubara
Author-email: yutaka.matsubara@gmail.com
Maintainer: Inada Naoki
Maintainer-email: songofacandy@gmail.com
License: "MIT"
Project-URL: Documentation, https://pymysql.readthedocs.io/
Keywords: MySQL
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Topic :: Database
Requires-Python: >=3.6
Provides-Extra: ed25519
Requires-Dist: PyNaCl (>=1.4.0) ; extra == 'ed25519'
Provides-Extra: rsa
Requires-Dist: cryptography ; extra == 'rsa'
.. image:: https://readthedocs.org/projects/pymysql/badge/?version=latest
:target: https://pymysql.readthedocs.io/
:alt: Documentation Status
.. image:: https://coveralls.io/repos/PyMySQL/PyMySQL/badge.svg?branch=master&service=github
:target: https://coveralls.io/github/PyMySQL/PyMySQL?branch=master
.. image:: https://img.shields.io/lgtm/grade/python/g/PyMySQL/PyMySQL.svg?logo=lgtm&logoWidth=18
:target: https://lgtm.com/projects/g/PyMySQL/PyMySQL/context:python
PyMySQL
=======
.. contents:: Table of Contents
:local:
This package contains a pure-Python MySQL client library, based on `PEP 249`_.
Most public APIs are compatible with mysqlclient and MySQLdb.
NOTE: PyMySQL doesn't support the low-level APIs that `_mysql` provides, such as `data_seek`,
`store_result`, and `use_result`. You should use the high-level APIs defined in `PEP 249`_.
Some APIs such as `autocommit` and `ping` are supported anyway, because `PEP 249`_ doesn't
cover their use case.
.. _`PEP 249`: https://www.python.org/dev/peps/pep-0249/
Requirements
-------------
* Python -- one of the following:
- CPython_ : 3.6 and newer
- PyPy_ : Latest 3.x version
* MySQL Server -- one of the following:
- MySQL_ >= 5.6
- MariaDB_ >= 10.0
.. _CPython: https://www.python.org/
.. _PyPy: https://pypy.org/
.. _MySQL: https://www.mysql.com/
.. _MariaDB: https://mariadb.org/
Installation
------------
Package is uploaded on `PyPI <https://pypi.org/project/PyMySQL>`_.
You can install it with pip::
$ python3 -m pip install PyMySQL
To use "sha256_password" or "caching_sha2_password" for authenticate,
you need to install additional dependency::
$ python3 -m pip install PyMySQL[rsa]
To use MariaDB's "ed25519" authentication method, you need to install
additional dependency::
$ python3 -m pip install PyMySQL[ed25519]
Documentation
-------------
Documentation is available online: https://pymysql.readthedocs.io/
For support, please refer to the `StackOverflow
<https://stackoverflow.com/questions/tagged/pymysql>`_.
Example
-------
The following examples make use of a simple table
.. code:: sql
CREATE TABLE `users` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`email` varchar(255) COLLATE utf8_bin NOT NULL,
`password` varchar(255) COLLATE utf8_bin NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin
AUTO_INCREMENT=1 ;
.. code:: python
import pymysql.cursors
# Connect to the database
connection = pymysql.connect(host='localhost',
user='user',
password='passwd',
database='db',
cursorclass=pymysql.cursors.DictCursor)
with connection:
with connection.cursor() as cursor:
# Create a new record
sql = "INSERT INTO `users` (`email`, `password`) VALUES (%s, %s)"
cursor.execute(sql, ('webmaster@python.org', 'very-secret'))
# connection is not autocommit by default. So you must commit to save
# your changes.
connection.commit()
with connection.cursor() as cursor:
# Read a single record
sql = "SELECT `id`, `password` FROM `users` WHERE `email`=%s"
cursor.execute(sql, ('webmaster@python.org',))
result = cursor.fetchone()
print(result)
This example will print:
.. code:: python
{'password': 'very-secret', 'id': 1}
Resources
---------
* DB-API 2.0: https://www.python.org/dev/peps/pep-0249/
* MySQL Reference Manuals: https://dev.mysql.com/doc/
* MySQL client/server protocol:
https://dev.mysql.com/doc/internals/en/client-server-protocol.html
* "Connector" channel in MySQL Community Slack:
https://lefred.be/mysql-community-on-slack/
* PyMySQL mailing list: https://groups.google.com/forum/#!forum/pymysql-users
License
-------
PyMySQL is released under the MIT License. See LICENSE for more information.
PyMySQL-1.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
PyMySQL-1.0.2.dist-info/LICENSE,sha256=MUEg3GXwgA9ziksxQAx27hTezR--d86cNUCkIbhup7Y,1070
PyMySQL-1.0.2.dist-info/METADATA,sha256=hz4Fdo8sOFKcNqZ8wp4Bp-txNCOBCnw9-leYR7QBZ5I,5119
PyMySQL-1.0.2.dist-info/RECORD,,
PyMySQL-1.0.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
PyMySQL-1.0.2.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92
PyMySQL-1.0.2.dist-info/top_level.txt,sha256=IKlV-f4o90sOdnMd6HBvo0l2nqfJOGUzkwZeaEEGuRg,8
pymysql/__init__.py,sha256=XL7skPUK4cbKiek68T0vMob-L4YkIRLb2KX4hdMZVvM,4391
pymysql/__pycache__/__init__.cpython-38.pyc,,
pymysql/__pycache__/_auth.cpython-38.pyc,,
pymysql/__pycache__/charset.cpython-38.pyc,,
pymysql/__pycache__/connections.cpython-38.pyc,,
pymysql/__pycache__/converters.cpython-38.pyc,,
pymysql/__pycache__/cursors.cpython-38.pyc,,
pymysql/__pycache__/err.cpython-38.pyc,,
pymysql/__pycache__/optionfile.cpython-38.pyc,,
pymysql/__pycache__/protocol.cpython-38.pyc,,
pymysql/__pycache__/times.cpython-38.pyc,,
pymysql/_auth.py,sha256=l1VtBwDpCtTkalgYQFASO-rj-vEd3DGYR8g-eQjNF1U,7399
pymysql/charset.py,sha256=JCvshFnNf4vzkpXc6uPCyg07qGNfZaVZoxrFqzVlKFs,10293
pymysql/connections.py,sha256=EwKWqFIWlx6kbOeDFIhMFpjJ9-pyF140E5ouKgrrYfY,51251
pymysql/constants/CLIENT.py,sha256=SSvMFPZCTVMU1UWa4zOrfhYMDdR2wG2mS0E5GzJhDsg,878
pymysql/constants/COMMAND.py,sha256=TGITAUcNWlq2Gwg2wv5UK2ykdTd4LYTk_EcJJOCpGIc,679
pymysql/constants/CR.py,sha256=oHyD9dnR1DUX7hd42rcamMnFrWhjUZz7E4S6qQWSQb4,1927
pymysql/constants/ER.py,sha256=cH5wgU-e70wd0uSygNR5IFCnnXcrR9WLwJPMH22bhUw,12296
pymysql/constants/FIELD_TYPE.py,sha256=ytFzgAnGmb9hvdsBlnK68qdZv_a6jYFIXT6VSAb60z8,370
pymysql/constants/FLAG.py,sha256=Fy-PrCLnUI7fx_o5WypYnUAzWAM0E9d5yL8fFRVKffY,214
pymysql/constants/SERVER_STATUS.py,sha256=m28Iq5JGCFCWLhafE73-iOvw_9gDGqnytW3NkHpbugA,333
pymysql/constants/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pymysql/constants/__pycache__/CLIENT.cpython-38.pyc,,
pymysql/constants/__pycache__/COMMAND.cpython-38.pyc,,
pymysql/constants/__pycache__/CR.cpython-38.pyc,,
pymysql/constants/__pycache__/ER.cpython-38.pyc,,
pymysql/constants/__pycache__/FIELD_TYPE.cpython-38.pyc,,
pymysql/constants/__pycache__/FLAG.cpython-38.pyc,,
pymysql/constants/__pycache__/SERVER_STATUS.cpython-38.pyc,,
pymysql/constants/__pycache__/__init__.cpython-38.pyc,,
pymysql/converters.py,sha256=MBXTOCXSyewMculaRliBEzPVkOKXLiRMqvIXih9Akrg,9430
pymysql/cursors.py,sha256=1E79f3vysxygyfZMhvR6-yFDfysRn3Go8xZTywteh4o,15366
pymysql/err.py,sha256=bpxayM4IUnFQAd8bUZ3PFsFomi9QSfBk-0TJXyKU2FI,3773
pymysql/optionfile.py,sha256=ehPrZW4d7pcEvXGAEpsKgLdXpFnIQD93yF7T_jHjoRk,573
pymysql/protocol.py,sha256=Ur8xXkVvyFc6m5CA34QrHBasADvS_NPFsWU-Q3flRYA,11859
pymysql/times.py,sha256=_qXgDaYwsHntvpIKSKXp1rrYIgtq6Z9pLyLnO2XNoL0,360
Wheel-Version: 1.0
Generator: bdist_wheel (0.36.2)
Root-Is-Purelib: true
Tag: py3-none-any
Copyright 2005-2023 SQLAlchemy authors and contributors <see AUTHORS file>.
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Metadata-Version: 2.1
Name: SQLAlchemy
Version: 2.0.0
Summary: Database Abstraction Library
Home-page: https://www.sqlalchemy.org
Author: Mike Bayer
Author-email: mike_mp@zzzcomputing.com
License: MIT
Project-URL: Documentation, https://docs.sqlalchemy.org
Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Database :: Front-Ends
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
License-File: LICENSE
Requires-Dist: typing-extensions (>=4.2.0)
Requires-Dist: greenlet (!=0.4.17) ; platform_machine == "aarch64" or (platform_machine == "ppc64le" or (platform_machine == "x86_64" or (platform_machine == "amd64" or (platform_machine == "AMD64" or (platform_machine == "win32" or platform_machine == "WIN32")))))
Requires-Dist: importlib-metadata ; python_version < "3.8"
Provides-Extra: aiomysql
Requires-Dist: greenlet (!=0.4.17) ; extra == 'aiomysql'
Requires-Dist: aiomysql ; extra == 'aiomysql'
Provides-Extra: aiosqlite
Requires-Dist: greenlet (!=0.4.17) ; extra == 'aiosqlite'
Requires-Dist: aiosqlite ; extra == 'aiosqlite'
Requires-Dist: typing-extensions (!=3.10.0.1) ; extra == 'aiosqlite'
Provides-Extra: asyncio
Requires-Dist: greenlet (!=0.4.17) ; extra == 'asyncio'
Provides-Extra: asyncmy
Requires-Dist: greenlet (!=0.4.17) ; extra == 'asyncmy'
Requires-Dist: asyncmy (!=0.2.4,>=0.2.3) ; extra == 'asyncmy'
Provides-Extra: mariadb_connector
Requires-Dist: mariadb (!=1.1.2,!=1.1.5,>=1.0.1) ; extra == 'mariadb_connector'
Provides-Extra: mssql
Requires-Dist: pyodbc ; extra == 'mssql'
Provides-Extra: mssql_pymssql
Requires-Dist: pymssql ; extra == 'mssql_pymssql'
Provides-Extra: mssql_pyodbc
Requires-Dist: pyodbc ; extra == 'mssql_pyodbc'
Provides-Extra: mypy
Requires-Dist: mypy (>=0.910) ; extra == 'mypy'
Provides-Extra: mysql
Requires-Dist: mysqlclient (>=1.4.0) ; extra == 'mysql'
Provides-Extra: mysql_connector
Requires-Dist: mysql-connector-python ; extra == 'mysql_connector'
Provides-Extra: oracle
Requires-Dist: cx-oracle (>=7) ; extra == 'oracle'
Provides-Extra: oracle_oracledb
Requires-Dist: oracledb (>=1.0.1) ; extra == 'oracle_oracledb'
Provides-Extra: postgresql
Requires-Dist: psycopg2 (>=2.7) ; extra == 'postgresql'
Provides-Extra: postgresql_asyncpg
Requires-Dist: greenlet (!=0.4.17) ; extra == 'postgresql_asyncpg'
Requires-Dist: asyncpg ; extra == 'postgresql_asyncpg'
Provides-Extra: postgresql_pg8000
Requires-Dist: pg8000 (>=1.29.1) ; extra == 'postgresql_pg8000'
Provides-Extra: postgresql_psycopg
Requires-Dist: psycopg (>=3.0.7) ; extra == 'postgresql_psycopg'
Provides-Extra: postgresql_psycopg2binary
Requires-Dist: psycopg2-binary ; extra == 'postgresql_psycopg2binary'
Provides-Extra: postgresql_psycopg2cffi
Requires-Dist: psycopg2cffi ; extra == 'postgresql_psycopg2cffi'
Provides-Extra: pymysql
Requires-Dist: pymysql ; extra == 'pymysql'
Provides-Extra: sqlcipher
Requires-Dist: sqlcipher3-binary ; extra == 'sqlcipher'
SQLAlchemy
==========
|PyPI| |Python| |Downloads|
.. |PyPI| image:: https://img.shields.io/pypi/v/sqlalchemy
:target: https://pypi.org/project/sqlalchemy
:alt: PyPI
.. |Python| image:: https://img.shields.io/pypi/pyversions/sqlalchemy
:target: https://pypi.org/project/sqlalchemy
:alt: PyPI - Python Version
.. |Downloads| image:: https://img.shields.io/pypi/dm/sqlalchemy
:target: https://pypi.org/project/sqlalchemy
:alt: PyPI - Downloads
The Python SQL Toolkit and Object Relational Mapper
Introduction
-------------
SQLAlchemy is the Python SQL toolkit and Object Relational Mapper
that gives application developers the full power and
flexibility of SQL. SQLAlchemy provides a full suite
of well known enterprise-level persistence patterns,
designed for efficient and high-performing database
access, adapted into a simple and Pythonic domain
language.
Major SQLAlchemy features include:
* An industrial strength ORM, built
from the core on the identity map, unit of work,
and data mapper patterns. These patterns
allow transparent persistence of objects
using a declarative configuration system.
Domain models
can be constructed and manipulated naturally,
and changes are synchronized with the
current transaction automatically.
* A relationally-oriented query system, exposing
the full range of SQL's capabilities
explicitly, including joins, subqueries,
correlation, and most everything else,
in terms of the object model.
Writing queries with the ORM uses the same
techniques of relational composition you use
when writing SQL. While you can drop into
literal SQL at any time, it's virtually never
needed.
* A comprehensive and flexible system
of eager loading for related collections and objects.
Collections are cached within a session,
and can be loaded on individual access, all
at once using joins, or by query per collection
across the full result set.
* A Core SQL construction system and DBAPI
interaction layer. The SQLAlchemy Core is
separate from the ORM and is a full database
abstraction layer in its own right, and includes
an extensible Python-based SQL expression
language, schema metadata, connection pooling,
type coercion, and custom types.
* All primary and foreign key constraints are
assumed to be composite and natural. Surrogate
integer primary keys are of course still the
norm, but SQLAlchemy never assumes or hardcodes
to this model.
* Database introspection and generation. Database
schemas can be "reflected" in one step into
Python structures representing database metadata;
those same structures can then generate
CREATE statements right back out - all within
the Core, independent of the ORM.
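As a rough, hedged illustration of the ORM and Core features described above (a minimal
sketch, not taken from the SQLAlchemy documentation; the ``User`` model and table name are
invented for the example):

.. code:: python

    from sqlalchemy import create_engine, select, Integer, String
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, Session

    class Base(DeclarativeBase):
        pass

    class User(Base):
        __tablename__ = "users"
        id: Mapped[int] = mapped_column(Integer, primary_key=True)
        name: Mapped[str] = mapped_column(String(50))

    # Schema generation from metadata, independent of the ORM session
    engine = create_engine("sqlite:///:memory:")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add(User(name="alice"))      # unit of work: pending until commit
        session.commit()
        stmt = select(User).where(User.name == "alice")
        print(session.scalars(stmt).all())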
SQLAlchemy's philosophy:
* SQL databases behave less and less like object
collections the more size and performance start to
matter; object collections behave less and less like
tables and rows the more abstraction starts to matter.
SQLAlchemy aims to accommodate both of these
principles.
* An ORM doesn't need to hide the "R". A relational
database provides rich, set-based functionality
that should be fully exposed. SQLAlchemy's
ORM provides an open-ended set of patterns
that allow a developer to construct a custom
mediation layer between a domain model and
a relational schema, turning the so-called
"object relational impedance" issue into
a distant memory.
* The developer, in all cases, makes all decisions
regarding the design, structure, and naming conventions
of both the object model as well as the relational
schema. SQLAlchemy only provides the means
to automate the execution of these decisions.
* With SQLAlchemy, there's no such thing as
"the ORM generated a bad query" - you
retain full control over the structure of
queries, including how joins are organized,
how subqueries and correlation is used, what
columns are requested. Everything SQLAlchemy
does is ultimately the result of a developer-initiated
decision.
* Don't use an ORM if the problem doesn't need one.
SQLAlchemy consists of a Core and separate ORM
component. The Core offers a full SQL expression
language that allows Pythonic construction
of SQL constructs that render directly to SQL
strings for a target database, returning
result sets that are essentially enhanced DBAPI
cursors.
* Transactions should be the norm. With SQLAlchemy's
ORM, nothing goes to permanent storage until
commit() is called. SQLAlchemy encourages applications
to create a consistent means of delineating
the start and end of a series of operations.
* Never render a literal value in a SQL statement.
Bound parameters are used to the greatest degree
possible, allowing query optimizers to cache
query plans effectively and making SQL injection
attacks a non-issue.
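To make the last point concrete, a small sketch (again not taken from the SQLAlchemy
documentation) of how values travel as bound parameters rather than being rendered into the
SQL string:

.. code:: python

    from sqlalchemy import create_engine, text

    engine = create_engine("sqlite:///:memory:")
    with engine.connect() as conn:
        conn.execute(text("CREATE TABLE t (x INTEGER)"))
        # Values are sent as bound parameters; the statement text only has placeholders.
        conn.execute(text("INSERT INTO t (x) VALUES (:x)"), [{"x": 1}, {"x": 2}])
        result = conn.execute(text("SELECT x FROM t WHERE x > :limit"), {"limit": 1})
        print(result.all())  # [(2,)]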
Documentation
-------------
Latest documentation is at:
https://www.sqlalchemy.org/docs/
Installation / Requirements
---------------------------
Full documentation for installation is at
`Installation <https://www.sqlalchemy.org/docs/intro.html#installation>`_.
Getting Help / Development / Bug reporting
------------------------------------------
Please refer to the `SQLAlchemy Community Guide <https://www.sqlalchemy.org/support.html>`_.
Code of Conduct
---------------
Above all, SQLAlchemy places great emphasis on polite, thoughtful, and
constructive communication between users and developers.
Please see our current Code of Conduct at
`Code of Conduct <https://www.sqlalchemy.org/codeofconduct.html>`_.
License
-------
SQLAlchemy is distributed under the `MIT license
<https://www.opensource.org/licenses/mit-license.php>`_.
Wheel-Version: 1.0
Generator: bdist_wheel (0.38.4)
Root-Is-Purelib: false
Tag: cp38-cp38-win_amd64
# don't import any costly modules
import sys
import os
is_pypy = '__pypy__' in sys.builtin_module_names
def warn_distutils_present():
if 'distutils' not in sys.modules:
return
if is_pypy and sys.version_info < (3, 7):
# PyPy for 3.6 unconditionally imports distutils, so bypass the warning
# https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
return
import warnings
warnings.warn(
"Distutils was imported before Setuptools, but importing Setuptools "
"also replaces the `distutils` module in `sys.modules`. This may lead "
"to undesirable behaviors or errors. To avoid these issues, avoid "
"using distutils directly, ensure that setuptools is installed in the "
"traditional way (e.g. not an editable install), and/or make sure "
"that setuptools is always imported before distutils."
)
def clear_distutils():
if 'distutils' not in sys.modules:
return
import warnings
warnings.warn("Setuptools is replacing distutils.")
mods = [
name
for name in sys.modules
if name == "distutils" or name.startswith("distutils.")
]
for name in mods:
del sys.modules[name]
def enabled():
"""
Allow selection of distutils by environment variable.
"""
which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
return which == 'local'
def ensure_local_distutils():
import importlib
clear_distutils()
# With the DistutilsMetaFinder in place,
# perform an import to cause distutils to be
# loaded from setuptools._distutils. Ref #2906.
with shim():
importlib.import_module('distutils')
# check that submodules load as expected
core = importlib.import_module('distutils.core')
assert '_distutils' in core.__file__, core.__file__
assert 'setuptools._distutils.log' not in sys.modules
def do_override():
"""
Ensure that the local copy of distutils is preferred over stdlib.
See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
for more motivation.
"""
if enabled():
warn_distutils_present()
ensure_local_distutils()
class _TrivialRe:
def __init__(self, *patterns):
self._patterns = patterns
def match(self, string):
return all(pat in string for pat in self._patterns)
class DistutilsMetaFinder:
def find_spec(self, fullname, path, target=None):
# optimization: only consider top level modules and those
# found in the CPython test suite.
if path is not None and not fullname.startswith('test.'):
return
method_name = 'spec_for_{fullname}'.format(**locals())
method = getattr(self, method_name, lambda: None)
return method()
def spec_for_distutils(self):
if self.is_cpython():
return
import importlib
import importlib.abc
import importlib.util
try:
mod = importlib.import_module('setuptools._distutils')
except Exception:
# There are a couple of cases where setuptools._distutils
# may not be present:
# - An older Setuptools without a local distutils is
# taking precedence. Ref #2957.
# - Path manipulation during sitecustomize removes
# setuptools from the path but only after the hook
# has been loaded. Ref #2980.
# In either case, fall back to stdlib behavior.
return
class DistutilsLoader(importlib.abc.Loader):
def create_module(self, spec):
mod.__name__ = 'distutils'
return mod
def exec_module(self, module):
pass
return importlib.util.spec_from_loader(
'distutils', DistutilsLoader(), origin=mod.__file__
)
@staticmethod
def is_cpython():
"""
Suppress supplying distutils for CPython (build and tests).
Ref #2965 and #3007.
"""
return os.path.isfile('pybuilddir.txt')
def spec_for_pip(self):
"""
Ensure stdlib distutils when running under pip.
See pypa/pip#8761 for rationale.
"""
if self.pip_imported_during_build():
return
clear_distutils()
self.spec_for_distutils = lambda: None
@classmethod
def pip_imported_during_build(cls):
"""
Detect if pip is being imported in a build script. Ref #2355.
"""
import traceback
return any(
cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None)
)
@staticmethod
def frame_file_is_setup(frame):
"""
Return True if the indicated frame suggests a setup.py file.
"""
# some frames may not have __file__ (#2940)
return frame.f_globals.get('__file__', '').endswith('setup.py')
def spec_for_sensitive_tests(self):
"""
Ensure stdlib distutils when running select tests under CPython.
python/cpython#91169
"""
clear_distutils()
self.spec_for_distutils = lambda: None
sensitive_tests = (
[
'test.test_distutils',
'test.test_peg_generator',
'test.test_importlib',
]
if sys.version_info < (3, 10)
else [
'test.test_distutils',
]
)
for name in DistutilsMetaFinder.sensitive_tests:
setattr(
DistutilsMetaFinder,
f'spec_for_{name}',
DistutilsMetaFinder.spec_for_sensitive_tests,
)
DISTUTILS_FINDER = DistutilsMetaFinder()
def add_shim():
DISTUTILS_FINDER in sys.meta_path or insert_shim()
class shim:
def __enter__(self):
insert_shim()
def __exit__(self, exc, value, tb):
remove_shim()
def insert_shim():
sys.meta_path.insert(0, DISTUTILS_FINDER)
def remove_shim():
try:
sys.meta_path.remove(DISTUTILS_FINDER)
except ValueError:
pass
__import__('_distutils_hack').do_override()
The MIT License (MIT)
Copyright (c) 2018 Alex Grönholm
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Metadata-Version: 2.1
Name: anyio
Version: 3.6.2
Summary: High level compatibility layer for multiple asynchronous event loop implementations
Author: Alex Grönholm
Author-email: alex.gronholm@nextday.fi
License: MIT
Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/
Project-URL: Source code, https://github.com/agronholm/anyio
Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Framework :: AnyIO
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Requires-Python: >=3.6.2
License-File: LICENSE
Requires-Dist: idna (>=2.8)
Requires-Dist: sniffio (>=1.1)
Requires-Dist: contextvars ; python_version < "3.7"
Requires-Dist: dataclasses ; python_version < "3.7"
Requires-Dist: typing-extensions ; python_version < "3.8"
Provides-Extra: doc
Requires-Dist: packaging ; extra == 'doc'
Requires-Dist: sphinx-rtd-theme ; extra == 'doc'
Requires-Dist: sphinx-autodoc-typehints (>=1.2.0) ; extra == 'doc'
Provides-Extra: test
Requires-Dist: coverage[toml] (>=4.5) ; extra == 'test'
Requires-Dist: hypothesis (>=4.0) ; extra == 'test'
Requires-Dist: pytest (>=7.0) ; extra == 'test'
Requires-Dist: pytest-mock (>=3.6.1) ; extra == 'test'
Requires-Dist: trustme ; extra == 'test'
Requires-Dist: contextlib2 ; (python_version < "3.7") and extra == 'test'
Requires-Dist: uvloop (<0.15) ; (python_version < "3.7" and (platform_python_implementation == "CPython" and platform_system != "Windows")) and extra == 'test'
Requires-Dist: mock (>=4) ; (python_version < "3.8") and extra == 'test'
Requires-Dist: uvloop (>=0.15) ; (python_version >= "3.7" and (platform_python_implementation == "CPython" and platform_system != "Windows")) and extra == 'test'
Provides-Extra: trio
Requires-Dist: trio (<0.22,>=0.16) ; extra == 'trio'
.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg
:target: https://github.com/agronholm/anyio/actions/workflows/test.yml
:alt: Build Status
.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master
:target: https://coveralls.io/github/agronholm/anyio?branch=master
:alt: Code Coverage
.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest
:target: https://anyio.readthedocs.io/en/latest/?badge=latest
:alt: Documentation
.. image:: https://badges.gitter.im/gitterHQ/gitter.svg
:target: https://gitter.im/python-trio/AnyIO
:alt: Gitter chat
AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or
trio_. It implements trio-like `structured concurrency`_ (SC) on top of asyncio, and works in harmony
with the native SC of trio itself.
Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or
trio_. AnyIO can also be adopted into a library or application incrementally – bit by bit, no full
refactoring necessary. It will blend in with native libraries of your chosen backend.
Documentation
-------------
View full documentation at: https://anyio.readthedocs.io/
Features
--------
AnyIO offers the following functionality:
* Task groups (nurseries_ in trio terminology)
* High level networking (TCP, UDP and UNIX sockets)
* `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python
3.8)
* async/await style UDP sockets (unlike asyncio where you still have to use Transports and
Protocols)
* A versatile API for byte streams and object streams
* Inter-task synchronization and communication (locks, conditions, events, semaphores, object
streams)
* Worker threads
* Subprocesses
* Asynchronous file I/O (using worker threads)
* Signal handling
AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures.
It even works with the popular Hypothesis_ library.
.. _asyncio: https://docs.python.org/3/library/asyncio.html
.. _trio: https://github.com/python-trio/trio
.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency
.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning
.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs
.. _pytest: https://docs.pytest.org/en/latest/
.. _Hypothesis: https://hypothesis.works/
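As a brief, hedged sketch of the task group and sleep APIs listed above (illustrative only,
not taken from the AnyIO documentation):

.. code:: python

    import anyio

    async def say(word: str, delay: float) -> None:
        await anyio.sleep(delay)
        print(word)

    async def main() -> None:
        # Both tasks run concurrently; the task group waits for them to finish.
        async with anyio.create_task_group() as tg:
            tg.start_soon(say, "hello", 0.1)
            tg.start_soon(say, "world", 0.2)

    anyio.run(main)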
anyio-3.6.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
anyio-3.6.2.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081
anyio-3.6.2.dist-info/METADATA,sha256=30LcHBTF6U8AO5jpASPwKp9dqHL3t0MuCADwSLWMFrc,4660
anyio-3.6.2.dist-info/RECORD,,
anyio-3.6.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio-3.6.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
anyio-3.6.2.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39
anyio-3.6.2.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6
anyio/__init__.py,sha256=M2R8dk6L5gL5lXHArzpSfEn2oH5jMyUKhzyrkRiv2AM,4037
anyio/__pycache__/__init__.cpython-38.pyc,,
anyio/__pycache__/from_thread.cpython-38.pyc,,
anyio/__pycache__/lowlevel.cpython-38.pyc,,
anyio/__pycache__/pytest_plugin.cpython-38.pyc,,
anyio/__pycache__/to_process.cpython-38.pyc,,
anyio/__pycache__/to_thread.cpython-38.pyc,,
anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/_backends/__pycache__/__init__.cpython-38.pyc,,
anyio/_backends/__pycache__/_asyncio.cpython-38.pyc,,
anyio/_backends/__pycache__/_trio.cpython-38.pyc,,
anyio/_backends/_asyncio.py,sha256=ZJDvRwfS4wv9WWcqWledNJyl8hx8A8-m9-gSKAJ6nBM,69238
anyio/_backends/_trio.py,sha256=CebCaqr8Szi6uCnUzwtBRLfUitR5OnDT_wfH-KiqvBQ,29696
anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/_core/__pycache__/__init__.cpython-38.pyc,,
anyio/_core/__pycache__/_compat.cpython-38.pyc,,
anyio/_core/__pycache__/_eventloop.cpython-38.pyc,,
anyio/_core/__pycache__/_exceptions.cpython-38.pyc,,
anyio/_core/__pycache__/_fileio.cpython-38.pyc,,
anyio/_core/__pycache__/_resources.cpython-38.pyc,,
anyio/_core/__pycache__/_signals.cpython-38.pyc,,
anyio/_core/__pycache__/_sockets.cpython-38.pyc,,
anyio/_core/__pycache__/_streams.cpython-38.pyc,,
anyio/_core/__pycache__/_subprocesses.cpython-38.pyc,,
anyio/_core/__pycache__/_synchronization.cpython-38.pyc,,
anyio/_core/__pycache__/_tasks.cpython-38.pyc,,
anyio/_core/__pycache__/_testing.cpython-38.pyc,,
anyio/_core/__pycache__/_typedattr.cpython-38.pyc,,
anyio/_core/_compat.py,sha256=X99W70r-O-JLdkKNtbddcIY5H2Nyg3Nk34oUYE9WZRs,5790
anyio/_core/_eventloop.py,sha256=DRn_hy679LtsJFsPX7dXjDv72bLtSFkTnWY9WVVfgCQ,4108
anyio/_core/_exceptions.py,sha256=1wqraNldZroYkoyB0HZStAruz_7yDCBaW-4zYwsKj8s,2904
anyio/_core/_fileio.py,sha256=au82uZXZX4fia8EoZq_E-JDwZFKe6ZtI0J6IkxK8FmQ,18298
anyio/_core/_resources.py,sha256=M_uN-90N8eSsWuvo-0xluWU_OG2BTyccAgsQ7XtHxzs,399
anyio/_core/_signals.py,sha256=D4btJN527tAADspKBeNKaCds-ZcEZJP8LWM_MjVuQRA,827
anyio/_core/_sockets.py,sha256=fW_Cbg6kfw4xgYuVuWbcWrAYspOcDSEjwxVATMzf2fo,19820
anyio/_core/_streams.py,sha256=gjT5xChJ1OoV8nNinljSv1yW4nqUS-QzZzIydQz3exQ,1494
anyio/_core/_subprocesses.py,sha256=pcchMI2OII0QSjiVxRiTEz4M0B7TlQPzGurfCuka-xc,5049
anyio/_core/_synchronization.py,sha256=xOOG4hF9783N6E2IcD3YKiukguA5bPrj6BodDsKNaJY,16822
anyio/_core/_tasks.py,sha256=ebGLjHvwL6I9aGyPwvCig1drebSVYFzvY3pnN3TsB4o,5273
anyio/_core/_testing.py,sha256=VZka_yebIhJ6mJ6Vo_ilO3Nbz53ieqg0WBijwciMwdY,2196
anyio/_core/_typedattr.py,sha256=k5-wBvMlDlKHIpn18INVnXAlGwI3CrAvPmWoceHjnOQ,2534
anyio/abc/__init__.py,sha256=hMa47CMs5O1twC2bBcSbzwX-3Q08BAgAPTRekQobb3E,2123
anyio/abc/__pycache__/__init__.cpython-38.pyc,,
anyio/abc/__pycache__/_resources.cpython-38.pyc,,
anyio/abc/__pycache__/_sockets.cpython-38.pyc,,
anyio/abc/__pycache__/_streams.cpython-38.pyc,,
anyio/abc/__pycache__/_subprocesses.cpython-38.pyc,,
anyio/abc/__pycache__/_tasks.cpython-38.pyc,,
anyio/abc/__pycache__/_testing.cpython-38.pyc,,
anyio/abc/_resources.py,sha256=js737mWPG6IW0fH8W4Tz9eNWLztse7dKxEC61z934Vk,752
anyio/abc/_sockets.py,sha256=i1VdcJTLAuRlYeZoL6s5RBSWbX62Cu6ln5YZBL2YrWk,5754
anyio/abc/_streams.py,sha256=0g70fhKAzbnK0KKmWwRgwmKdApBwduAcVj4TpjSzjzU,6501
anyio/abc/_subprocesses.py,sha256=iREP_YQ91it88lDU4XIcI3HZ9HUvV5UmjQk_sSPonrw,2071
anyio/abc/_tasks.py,sha256=mQQd1DANqpySKyehVVPdMfi_UEG49zZUJpt5blunOjg,3119
anyio/abc/_testing.py,sha256=ifKCUPzcQdHAEGO-weu2GQvzjMQPPIWO24mQ0z6zkdU,1928
anyio/from_thread.py,sha256=nSq6mafYMqwxKmzdJyISg8cp-AyBj9rxZPMt_b7klSM,16497
anyio/lowlevel.py,sha256=W4ydshns7f86YuSESFc2igTf46AWMXnGPQGsY_Esl2E,4679
anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/pytest_plugin.py,sha256=kWj2B8BJehePJd1sztRBmJBRh8O4hk1oGSYQRlX5Gr8,5134
anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
anyio/streams/__pycache__/__init__.cpython-38.pyc,,
anyio/streams/__pycache__/buffered.cpython-38.pyc,,
anyio/streams/__pycache__/file.cpython-38.pyc,,
anyio/streams/__pycache__/memory.cpython-38.pyc,,
anyio/streams/__pycache__/stapled.cpython-38.pyc,,
anyio/streams/__pycache__/text.cpython-38.pyc,,
anyio/streams/__pycache__/tls.cpython-38.pyc,,
anyio/streams/buffered.py,sha256=FegOSO4Xcxa5SaDfU1A3ZkTTxaPrv6G435Y_giZ8k44,4437
anyio/streams/file.py,sha256=pujJ-m6BX-gOLnVoZwkE5kh-YDs5Vx9eJFVkvliQ0S4,4353
anyio/streams/memory.py,sha256=3RGeZoevoGIgBWfD2_X1cqxIPOz-BqQkRf6lUcOnBYc,9209
anyio/streams/stapled.py,sha256=0E0V15v8M5GVelpHe5RT0S33tQ9hGe4ZCXo_KJEjtt4,4258
anyio/streams/text.py,sha256=WRFyjsRpBjQKdCmR4ZuzYTEAJqGx2s5oTJmGI1C6Ng0,5014
anyio/streams/tls.py,sha256=-WXGsMV14XHXAxc38WpBvGusjuY7e449g4UCEHIlnWw,12040
anyio/to_process.py,sha256=hu0ES3HJC-VEjcdPJMzAzjyTaekaCNToO3coj3jvnus,9247
anyio/to_thread.py,sha256=VeMQoo8Va2zz0WFk2p123QikDpqk2wYZGw20COC3wqw,2124
Wheel-Version: 1.0
Generator: bdist_wheel (0.37.1)
Root-Is-Purelib: true
Tag: py3-none-any
__all__ = (
"maybe_async",
"maybe_async_cm",
"run",
"sleep",
"sleep_forever",
"sleep_until",
"current_time",
"get_all_backends",
"get_cancelled_exc_class",
"BrokenResourceError",
"BrokenWorkerProcess",
"BusyResourceError",
"ClosedResourceError",
"DelimiterNotFound",
"EndOfStream",
"ExceptionGroup",
"IncompleteRead",
"TypedAttributeLookupError",
"WouldBlock",
"AsyncFile",
"Path",
"open_file",
"wrap_file",
"aclose_forcefully",
"open_signal_receiver",
"connect_tcp",
"connect_unix",
"create_tcp_listener",
"create_unix_listener",
"create_udp_socket",
"create_connected_udp_socket",
"getaddrinfo",
"getnameinfo",
"wait_socket_readable",
"wait_socket_writable",
"create_memory_object_stream",
"run_process",
"open_process",
"create_lock",
"CapacityLimiter",
"CapacityLimiterStatistics",
"Condition",
"ConditionStatistics",
"Event",
"EventStatistics",
"Lock",
"LockStatistics",
"Semaphore",
"SemaphoreStatistics",
"create_condition",
"create_event",
"create_semaphore",
"create_capacity_limiter",
"open_cancel_scope",
"fail_after",
"move_on_after",
"current_effective_deadline",
"TASK_STATUS_IGNORED",
"CancelScope",
"create_task_group",
"TaskInfo",
"get_current_task",
"get_running_tasks",
"wait_all_tasks_blocked",
"run_sync_in_worker_thread",
"run_async_from_thread",
"run_sync_from_thread",
"current_default_worker_thread_limiter",
"create_blocking_portal",
"start_blocking_portal",
"typed_attribute",
"TypedAttributeSet",
"TypedAttributeProvider",
)
from typing import Any
from ._core._compat import maybe_async, maybe_async_cm
from ._core._eventloop import (
current_time,
get_all_backends,
get_cancelled_exc_class,
run,
sleep,
sleep_forever,
sleep_until,
)
from ._core._exceptions import (
BrokenResourceError,
BrokenWorkerProcess,
BusyResourceError,
ClosedResourceError,
DelimiterNotFound,
EndOfStream,
ExceptionGroup,
IncompleteRead,
TypedAttributeLookupError,
WouldBlock,
)
from ._core._fileio import AsyncFile, Path, open_file, wrap_file
from ._core._resources import aclose_forcefully
from ._core._signals import open_signal_receiver
from ._core._sockets import (
connect_tcp,
connect_unix,
create_connected_udp_socket,
create_tcp_listener,
create_udp_socket,
create_unix_listener,
getaddrinfo,
getnameinfo,
wait_socket_readable,
wait_socket_writable,
)
from ._core._streams import create_memory_object_stream
from ._core._subprocesses import open_process, run_process
from ._core._synchronization import (
CapacityLimiter,
CapacityLimiterStatistics,
Condition,
ConditionStatistics,
Event,
EventStatistics,
Lock,
LockStatistics,
Semaphore,
SemaphoreStatistics,
create_capacity_limiter,
create_condition,
create_event,
create_lock,
create_semaphore,
)
from ._core._tasks import (
TASK_STATUS_IGNORED,
CancelScope,
create_task_group,
current_effective_deadline,
fail_after,
move_on_after,
open_cancel_scope,
)
from ._core._testing import (
TaskInfo,
get_current_task,
get_running_tasks,
wait_all_tasks_blocked,
)
from ._core._typedattr import TypedAttributeProvider, TypedAttributeSet, typed_attribute
# Re-exported here, for backwards compatibility
# isort: off
from .to_thread import current_default_worker_thread_limiter, run_sync_in_worker_thread
from .from_thread import (
create_blocking_portal,
run_async_from_thread,
run_sync_from_thread,
start_blocking_portal,
)
# Re-export imports so they look like they live directly in this package
key: str
value: Any
for key, value in list(locals().items()):
if getattr(value, "__module__", "").startswith("anyio."):
value.__module__ = __name__
from abc import ABCMeta, abstractmethod
from contextlib import AbstractContextManager
from types import TracebackType
from typing import (
TYPE_CHECKING,
Any,
AsyncContextManager,
Callable,
ContextManager,
Generator,
Generic,
Iterable,
List,
Optional,
Tuple,
Type,
TypeVar,
Union,
overload,
)
from warnings import warn
if TYPE_CHECKING:
from ._testing import TaskInfo
else:
TaskInfo = object
T = TypeVar("T")
AnyDeprecatedAwaitable = Union[
"DeprecatedAwaitable",
"DeprecatedAwaitableFloat",
"DeprecatedAwaitableList[T]",
TaskInfo,
]
@overload
async def maybe_async(__obj: TaskInfo) -> TaskInfo:
...
@overload
async def maybe_async(__obj: "DeprecatedAwaitableFloat") -> float:
...
@overload
async def maybe_async(__obj: "DeprecatedAwaitableList[T]") -> List[T]:
...
@overload
async def maybe_async(__obj: "DeprecatedAwaitable") -> None:
...
async def maybe_async(
__obj: "AnyDeprecatedAwaitable[T]",
) -> Union[TaskInfo, float, List[T], None]:
"""
Await on the given object if necessary.
This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and
methods were converted from coroutine functions into regular functions.
Do **not** try to use this for any other purpose!
:return: the result of awaiting on the object if coroutine, or the object itself otherwise
.. versionadded:: 2.2
"""
return __obj._unwrap()
class _ContextManagerWrapper:
def __init__(self, cm: ContextManager[T]):
self._cm = cm
async def __aenter__(self) -> T:
return self._cm.__enter__()
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
return self._cm.__exit__(exc_type, exc_val, exc_tb)
def maybe_async_cm(
cm: Union[ContextManager[T], AsyncContextManager[T]]
) -> AsyncContextManager[T]:
"""
Wrap a regular context manager as an async one if necessary.
This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and
methods were changed to return regular context managers instead of async ones.
:param cm: a regular or async context manager
:return: an async context manager
.. versionadded:: 2.2
"""
if not isinstance(cm, AbstractContextManager):
raise TypeError("Given object is not an context manager")
return _ContextManagerWrapper(cm)
def _warn_deprecation(
awaitable: "AnyDeprecatedAwaitable[Any]", stacklevel: int = 1
) -> None:
warn(
f'Awaiting on {awaitable._name}() is deprecated. Use "await '
f"anyio.maybe_async({awaitable._name}(...)) if you have to support both AnyIO 2.x "
f'and 3.x, or just remove the "await" if you are completely migrating to AnyIO 3+.',
DeprecationWarning,
stacklevel=stacklevel + 1,
)
class DeprecatedAwaitable:
def __init__(self, func: Callable[..., "DeprecatedAwaitable"]):
self._name = f"{func.__module__}.{func.__qualname__}"
def __await__(self) -> Generator[None, None, None]:
_warn_deprecation(self)
if False:
yield
def __reduce__(self) -> Tuple[Type[None], Tuple[()]]:
return type(None), ()
def _unwrap(self) -> None:
return None
class DeprecatedAwaitableFloat(float):
def __new__(
cls, x: float, func: Callable[..., "DeprecatedAwaitableFloat"]
) -> "DeprecatedAwaitableFloat":
return super().__new__(cls, x)
def __init__(self, x: float, func: Callable[..., "DeprecatedAwaitableFloat"]):
self._name = f"{func.__module__}.{func.__qualname__}"
def __await__(self) -> Generator[None, None, float]:
_warn_deprecation(self)
if False:
yield
return float(self)
def __reduce__(self) -> Tuple[Type[float], Tuple[float]]:
return float, (float(self),)
def _unwrap(self) -> float:
return float(self)
class DeprecatedAwaitableList(List[T]):
def __init__(
self,
iterable: Iterable[T] = (),
*,
func: Callable[..., "DeprecatedAwaitableList[T]"],
):
super().__init__(iterable)
self._name = f"{func.__module__}.{func.__qualname__}"
def __await__(self) -> Generator[None, None, List[T]]:
_warn_deprecation(self)
if False:
yield
return list(self)
def __reduce__(self) -> Tuple[Type[List[T]], Tuple[List[T]]]:
return list, (list(self),)
def _unwrap(self) -> List[T]:
return list(self)
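# Behaviour sketch (editor's note): these shims keep 2.x-style "await" working while
# emitting a DeprecationWarning through __await__(). For example, assuming the 3.x API:
#
#     import anyio
#
#     async def main() -> None:
#         t1 = anyio.current_time()        # 3.x style: no warning, already usable as a float
#         t2 = await anyio.current_time()  # 2.x style: still works, but warns
#         assert float(t1) <= t2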
class DeprecatedAsyncContextManager(Generic[T], metaclass=ABCMeta):
@abstractmethod
def __enter__(self) -> T:
pass
@abstractmethod
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
pass
async def __aenter__(self) -> T:
warn(
f"Using {self.__class__.__name__} as an async context manager has been deprecated. "
f'Use "async with anyio.maybe_async_cm(yourcontextmanager) as foo:" if you have to '
f'support both AnyIO 2.x and 3.x, or just remove the "async" from "async with" if '
f"you are completely migrating to AnyIO 3+.",
DeprecationWarning,
)
return self.__enter__()
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
return self.__exit__(exc_type, exc_val, exc_tb)
import math
import sys
import threading
from contextlib import contextmanager
from importlib import import_module
from typing import (
Any,
Callable,
Coroutine,
Dict,
Generator,
Optional,
Tuple,
Type,
TypeVar,
)
import sniffio
# This must be updated when new backends are introduced
from ._compat import DeprecatedAwaitableFloat
BACKENDS = "asyncio", "trio"
T_Retval = TypeVar("T_Retval")
threadlocals = threading.local()
def run(
func: Callable[..., Coroutine[Any, Any, T_Retval]],
*args: object,
backend: str = "asyncio",
backend_options: Optional[Dict[str, Any]] = None,
) -> T_Retval:
"""
Run the given coroutine function in an asynchronous event loop.
The current thread must not be already running an event loop.
:param func: a coroutine function
:param args: positional arguments to ``func``
:param backend: name of the asynchronous event loop implementation – currently either
``asyncio`` or ``trio``
:param backend_options: keyword arguments to call the backend ``run()`` implementation with
(documented :ref:`here <backend options>`)
:return: the return value of the coroutine function
:raises RuntimeError: if an asynchronous event loop is already running in this thread
:raises LookupError: if the named backend is not found
"""
try:
asynclib_name = sniffio.current_async_library()
except sniffio.AsyncLibraryNotFoundError:
pass
else:
raise RuntimeError(f"Already running {asynclib_name} in this thread")
try:
asynclib = import_module(f"..._backends._{backend}", package=__name__)
except ImportError as exc:
raise LookupError(f"No such backend: {backend}") from exc
token = None
if sniffio.current_async_library_cvar.get(None) is None:
# Since we're in control of the event loop, we can cache the name of the async library
token = sniffio.current_async_library_cvar.set(backend)
try:
backend_options = backend_options or {}
return asynclib.run(func, *args, **backend_options)
finally:
if token:
sniffio.current_async_library_cvar.reset(token)
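# Usage sketch (editor's illustration): picking the backend explicitly. The trio backend
# only works if the trio package is installed; backend_options are forwarded unchanged to
# that backend's own run() implementation:
#
#     import anyio
#
#     async def main(name: str) -> None:
#         await anyio.sleep(0.1)
#         print("hello", name)
#
#     anyio.run(main, "world")                  # asyncio backend (the default)
#     anyio.run(main, "world", backend="trio")  # assumes trio is installed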
async def sleep(delay: float) -> None:
"""
Pause the current task for the specified duration.
:param delay: the duration, in seconds
"""
return await get_asynclib().sleep(delay)
async def sleep_forever() -> None:
"""
Pause the current task until it's cancelled.
This is a shortcut for ``sleep(math.inf)``.
.. versionadded:: 3.1
"""
await sleep(math.inf)
async def sleep_until(deadline: float) -> None:
"""
Pause the current task until the given time.
:param deadline: the absolute time to wake up at (according to the internal monotonic clock of
the event loop)
.. versionadded:: 3.1
"""
now = current_time()
await sleep(max(deadline - now, 0))
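# Usage sketch (editor's illustration, assumes AnyIO >= 3.1 per the versionadded notes):
# sleep_until() takes an absolute deadline on the event loop's monotonic clock, so
# deadlines are normally derived from current_time():
#
#     import anyio
#
#     async def main() -> None:
#         deadline = anyio.current_time() + 2.5
#         await anyio.sleep_until(deadline)
#
#     anyio.run(main)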
def current_time() -> DeprecatedAwaitableFloat:
"""
Return the current value of the event loop's internal clock.
:return: the clock value (seconds)
"""
return DeprecatedAwaitableFloat(get_asynclib().current_time(), current_time)
def get_all_backends() -> Tuple[str, ...]:
"""Return a tuple of the names of all built-in backends."""
return BACKENDS
def get_cancelled_exc_class() -> Type[BaseException]:
"""Return the current async library's cancellation exception class."""
return get_asynclib().CancelledError
#
# Private API
#
@contextmanager
def claim_worker_thread(backend: str) -> Generator[Any, None, None]:
module = sys.modules["anyio._backends._" + backend]
threadlocals.current_async_module = module
try:
yield
finally:
del threadlocals.current_async_module
def get_asynclib(asynclib_name: Optional[str] = None) -> Any:
if asynclib_name is None:
asynclib_name = sniffio.current_async_library()
modulename = "anyio._backends._" + asynclib_name
try:
return sys.modules[modulename]
except KeyError:
return import_module(modulename)
from traceback import format_exception
from typing import List
class BrokenResourceError(Exception):
"""
Raised when trying to use a resource that has been rendered unusable due to external causes
(e.g. a send stream whose peer has disconnected).
"""
class BrokenWorkerProcess(Exception):
"""
Raised by :func:`run_sync_in_process` if the worker process terminates abruptly or otherwise
misbehaves.
"""
class BusyResourceError(Exception):
"""Raised when two tasks are trying to read from or write to the same resource concurrently."""
def __init__(self, action: str):
super().__init__(f"Another task is already {action} this resource")
class ClosedResourceError(Exception):
"""Raised when trying to use a resource that has been closed."""
class DelimiterNotFound(Exception):
"""
Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
maximum number of bytes has been read without the delimiter being found.
"""
def __init__(self, max_bytes: int) -> None:
super().__init__(
f"The delimiter was not found among the first {max_bytes} bytes"
)
class EndOfStream(Exception):
"""Raised when trying to read from a stream that has been closed from the other end."""
class ExceptionGroup(BaseException):
"""
Raised when multiple exceptions have been raised in a task group.
:var ~typing.Sequence[BaseException] exceptions: the sequence of exceptions raised together
"""
SEPARATOR = "----------------------------\n"
exceptions: List[BaseException]
def __str__(self) -> str:
tracebacks = [
"".join(format_exception(type(exc), exc, exc.__traceback__))
for exc in self.exceptions
]
return (
f"{len(self.exceptions)} exceptions were raised in the task group:\n"
f"{self.SEPARATOR}{self.SEPARATOR.join(tracebacks)}"
)
def __repr__(self) -> str:
exception_reprs = ", ".join(repr(exc) for exc in self.exceptions)
return f"<{self.__class__.__name__}: {exception_reprs}>"
class IncompleteRead(Exception):
"""
Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or
:meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
connection is closed before the requested amount of bytes has been read.
"""
def __init__(self) -> None:
super().__init__(
"The stream was closed before the read operation could be completed"
)
class TypedAttributeLookupError(LookupError):
"""
Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute is not
found and no default value has been given.
"""
class WouldBlock(Exception):
"""Raised by ``X_nowait`` functions if ``X()`` would block."""
from ..abc import AsyncResource
from ._tasks import CancelScope
async def aclose_forcefully(resource: AsyncResource) -> None:
"""
Close an asynchronous resource in a cancelled scope.
Doing this closes the resource without waiting on anything.
:param resource: the resource to close
"""
with CancelScope() as scope:
scope.cancel()
await resource.aclose()
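# Usage sketch (editor's illustration): forcefully dropping a stream whose graceful
# aclose() could block or raise (e.g. a TLS stream that would otherwise try to send a
# closing handshake to an unresponsive peer):
#
#     from anyio import aclose_forcefully, connect_tcp
#
#     async def drop_connection(host: str) -> None:
#         stream = await connect_tcp(host, 443)
#         try:
#             ...  # talk to the peer
#         finally:
#             await aclose_forcefully(stream)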
from typing import AsyncIterator
from ._compat import DeprecatedAsyncContextManager
from ._eventloop import get_asynclib
def open_signal_receiver(
*signals: int,
) -> DeprecatedAsyncContextManager[AsyncIterator[int]]:
"""
Start receiving operating system signals.
:param signals: signals to receive (e.g. ``signal.SIGINT``)
:return: an asynchronous context manager for an asynchronous iterator which yields signal
numbers
.. warning:: Windows does not support signals natively so it is best to avoid relying on this
in cross-platform applications.
.. warning:: On asyncio, this permanently replaces any previous signal handler for the given
signals, as set via :meth:`~asyncio.loop.add_signal_handler`.
"""
return get_asynclib().open_signal_receiver(*signals)
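# Usage sketch (editor's illustration): iterating delivered signals. In 3.x the returned
# object is used as a regular context manager yielding an async iterator of signal
# numbers (async with still works via the deprecation shim above):
#
#     import signal
#     import anyio
#
#     async def main() -> None:
#         with anyio.open_signal_receiver(signal.SIGINT, signal.SIGTERM) as signals:
#             async for signum in signals:
#                 print("received signal", signum)
#                 return
#
#     anyio.run(main)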
import math
from typing import Any, Optional, Tuple, Type, TypeVar, overload
from ..streams.memory import (
MemoryObjectReceiveStream,
MemoryObjectSendStream,
MemoryObjectStreamState,
)
T_Item = TypeVar("T_Item")
@overload
def create_memory_object_stream(
max_buffer_size: float, item_type: Type[T_Item]
) -> Tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]:
...
@overload
def create_memory_object_stream(
max_buffer_size: float = 0,
) -> Tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]:
...
def create_memory_object_stream(
max_buffer_size: float = 0, item_type: Optional[Type[T_Item]] = None
) -> Tuple[MemoryObjectSendStream[Any], MemoryObjectReceiveStream[Any]]:
"""
Create a memory object stream.
:param max_buffer_size: number of items held in the buffer until ``send()`` starts blocking
:param item_type: type of item, for marking the streams with the right generic type for
static typing (not used at run time)
:return: a tuple of (send stream, receive stream)
"""
if max_buffer_size != math.inf and not isinstance(max_buffer_size, int):
raise ValueError("max_buffer_size must be either an integer or math.inf")
if max_buffer_size < 0:
raise ValueError("max_buffer_size cannot be negative")
state: MemoryObjectStreamState = MemoryObjectStreamState(max_buffer_size)
return MemoryObjectSendStream(state), MemoryObjectReceiveStream(state)
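# Usage sketch (editor's illustration): a producer/consumer pair. With the default
# max_buffer_size of 0, every send() waits until a receiver is ready; closing the send
# stream ends the receive loop cleanly:
#
#     import anyio
#
#     async def producer(send_stream: "MemoryObjectSendStream[int]") -> None:
#         async with send_stream:
#             for i in range(3):
#                 await send_stream.send(i)
#
#     async def main() -> None:
#         send_stream, receive_stream = anyio.create_memory_object_stream(0, item_type=int)
#         async with anyio.create_task_group() as tg:
#             tg.start_soon(producer, send_stream)
#             async with receive_stream:
#                 async for item in receive_stream:
#                     print("got", item)
#
#     anyio.run(main)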
from io import BytesIO
from os import PathLike
from subprocess import DEVNULL, PIPE, CalledProcessError, CompletedProcess
from typing import (
IO,
Any,
AsyncIterable,
List,
Mapping,
Optional,
Sequence,
Union,
cast,
)
from ..abc import Process
from ._eventloop import get_asynclib
from ._tasks import create_task_group
async def run_process(
command: Union[str, bytes, Sequence[Union[str, bytes]]],
*,
input: Optional[bytes] = None,
stdout: Union[int, IO[Any], None] = PIPE,
stderr: Union[int, IO[Any], None] = PIPE,
check: bool = True,
cwd: Union[str, bytes, "PathLike[str]", None] = None,
env: Optional[Mapping[str, str]] = None,
start_new_session: bool = False,
) -> "CompletedProcess[bytes]":
"""
Run an external command in a subprocess and wait until it completes.
.. seealso:: :func:`subprocess.run`
:param command: either a string to pass to the shell, or an iterable of strings containing the
executable name or path and its arguments
:param input: bytes passed to the standard input of the subprocess
:param stdout: either :data:`subprocess.PIPE` or :data:`subprocess.DEVNULL`
:param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL` or
:data:`subprocess.STDOUT`
:param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the process
terminates with a return code other than 0
:param cwd: If not ``None``, change the working directory to this before running the command
:param env: if not ``None``, this mapping replaces the inherited environment variables from the
parent process
:param start_new_session: if ``true`` the setsid() system call will be made in the child
process prior to the execution of the subprocess. (POSIX only)
:return: an object representing the completed process
:raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process exits with a
nonzero return code
"""
async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None:
buffer = BytesIO()
async for chunk in stream:
buffer.write(chunk)
stream_contents[index] = buffer.getvalue()
async with await open_process(
command,
stdin=PIPE if input else DEVNULL,
stdout=stdout,
stderr=stderr,
cwd=cwd,
env=env,
start_new_session=start_new_session,
) as process:
stream_contents: List[Optional[bytes]] = [None, None]
try:
async with create_task_group() as tg:
if process.stdout:
tg.start_soon(drain_stream, process.stdout, 0)
if process.stderr:
tg.start_soon(drain_stream, process.stderr, 1)
if process.stdin and input:
await process.stdin.send(input)
await process.stdin.aclose()
await process.wait()
except BaseException:
process.kill()
raise
output, errors = stream_contents
if check and process.returncode != 0:
raise CalledProcessError(cast(int, process.returncode), command, output, errors)
return CompletedProcess(command, cast(int, process.returncode), output, errors)
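# Usage sketch (editor's illustration): capturing output without raising on a nonzero
# exit code. Passing a sequence executes the program directly; passing a single string
# would run it through the shell instead:
#
#     import anyio
#     from anyio import run_process
#
#     async def main() -> None:
#         result = await run_process(["python", "--version"], check=False)
#         print(result.returncode, result.stdout.decode().strip())
#
#     anyio.run(main)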
async def open_process(
command: Union[str, bytes, Sequence[Union[str, bytes]]],
*,
stdin: Union[int, IO[Any], None] = PIPE,
stdout: Union[int, IO[Any], None] = PIPE,
stderr: Union[int, IO[Any], None] = PIPE,
cwd: Union[str, bytes, "PathLike[str]", None] = None,
env: Optional[Mapping[str, str]] = None,
start_new_session: bool = False,
) -> Process:
"""
Start an external command in a subprocess.
.. seealso:: :class:`subprocess.Popen`
:param command: either a string to pass to the shell, or an iterable of strings containing the
executable name or path and its arguments
:param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a
file-like object, or ``None``
:param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
a file-like object, or ``None``
:param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
:data:`subprocess.STDOUT`, a file-like object, or ``None``
:param cwd: If not ``None``, the working directory is changed before executing
:param env: If env is not ``None``, it must be a mapping that defines the environment
variables for the new process
:param start_new_session: if ``true`` the setsid() system call will be made in the child
process prior to the execution of the subprocess. (POSIX only)
:return: an asynchronous process object
"""
shell = isinstance(command, str)
return await get_asynclib().open_process(
command,
shell=shell,
stdin=stdin,
stdout=stdout,
stderr=stderr,
cwd=cwd,
env=env,
start_new_session=start_new_session,
)
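# Usage sketch (editor's illustration): streaming stdout line by line as it is produced,
# instead of buffering everything the way run_process() does. The command shown and the
# TextReceiveStream helper are illustrative choices:
#
#     import anyio
#     from anyio import open_process
#     from anyio.streams.text import TextReceiveStream
#
#     async def main() -> None:
#         async with await open_process(["ping", "-c", "3", "localhost"]) as process:
#             async for text in TextReceiveStream(process.stdout):
#                 print(text, end="")
#
#     anyio.run(main)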
from abc import ABCMeta, abstractmethod
from types import TracebackType
from typing import Optional, Type, TypeVar
T = TypeVar("T")
class AsyncResource(metaclass=ABCMeta):
"""
Abstract base class for all closeable asynchronous resources.
Works as an asynchronous context manager which returns the instance itself on enter, and calls
:meth:`aclose` on exit.
"""
async def __aenter__(self: T) -> T:
return self
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
await self.aclose()
@abstractmethod
async def aclose(self) -> None:
"""Close the resource."""
Except when otherwise stated (look for LICENSE files in directories or
information at the beginning of each file) all software and
documentation is licensed as follows:
The MIT License
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
Wheel-Version: 1.0
Generator: bdist_wheel (0.37.1)
Root-Is-Purelib: false
Tag: cp38-cp38-win_amd64
[distutils.setup_keywords]
cffi_modules = cffi.setuptools_ext:cffi_modules