import base64
import enum
import io
import threading
import time
import typing
import uuid
from collections import defaultdict, namedtuple
from typing import Any, Iterator, Optional, Tuple, Union
import weakref
from ._device import BaseDevice
from ._proto import *
class DataType(str, enum.Enum):
    """Kinds of performance data that can be sampled from the device."""
    SCREENSHOT = "screenshot"
    CPU = "cpu"
    MEMORY = "memory"
    NETWORK = "network"  # network traffic
    FPS = "fps"
    PAGE = "page"
    GPU = "gpu"
def __init__(self, d: BaseDevice, perfs: typing.Optional[typing.List[DataType]] = None):
    """Performance collector bound to device *d*.

    Args:
        d: the device to sample from
        perfs: which DataType kinds to collect; None (and the old []
            default) means "collect nothing until configured"
    """
    self._d = d
    self._bundle_id = None
    self._stop_event = threading.Event()
    self._wg = WaitGroup()
    self._started = False
    self._result = defaultdict(list)
    # fix: the previous default value [] was a shared mutable default,
    # so all instances created without perfs shared one list
    self._perfs = perfs if perfs is not None else []
    # the callback function accepts all the data
    self._callback = None
def start(self, bundle_id: str, callback: CallbackType = None):
    """Begin sampling performance data for *bundle_id*.

    Args:
        bundle_id: target app bundle identifier
        callback: receives (DataType, data) pairs; when None a default
            stdout printer is installed
    """
    if not callback:
        # by default do not print screenshot data (not yet decided how to handle it)
        callback = lambda _type, data: print(_type.value, data, flush=True) if _type != DataType.SCREENSHOT and _type in self._perfs else None
    self._rp = RunningProcess(self._d, bundle_id)
    self._thread_start(callback)
def _thread_start(self, callback: CallbackType):
    """Spawn one daemon worker thread per enabled perf data source."""
    wants = self._perfs
    sources = []
    if DataType.CPU in wants or DataType.MEMORY in wants:
        sources.append(iter_cpu_memory(self._d, self._rp))
    if DataType.FPS in wants:
        sources.append(iter_fps(self._d))
    if DataType.GPU in wants:
        sources.append(iter_gpu(self._d))
    if DataType.SCREENSHOT in wants:
        # screenshots are throttled to one every 2 seconds
        sources.append(set_interval(iter_screenshot(self._d), 2.0))
    if DataType.NETWORK in wants:
        sources.append(iter_network_flow(self._d, self._rp))
    for source in sources:
        self._wg.add(1)
        worker = threading.Thread(
            name="perf",
            target=append_data,
            args=(self._wg, self._stop_event, source, callback, wants),
            daemon=True)
        worker.start()
def stop(self):  # -> PerfReport:
    """Stop sampling: signal workers via the stop event and ask the
    instruments service to end each running iterator."""
    self._stop_event.set()
    with self._d.connect_instruments() as ts:
        print('Stop Sampling...')
        if DataType.NETWORK in self._perfs: ts.stop_network_iter()
        if DataType.GPU in self._perfs or DataType.FPS in self._perfs: ts.stop_iter_opengl_data()
        if DataType.CPU in self._perfs or DataType.MEMORY in self._perfs: ts.stop_iter_cpu_memory()
    print("\nFinished!")
    # memory and fps will take at least 1 second to catch _stop_event
    # to make function run faster, we not using self._wg.wait(..) here
    # > self._wg.wait(timeout=3.0) # wait all stopped
    # > self._started = False
def wait(self, timeout: float):
    """Block until all perf worker threads finish or *timeout* elapses."""
    group = self._wg
    return group.wait(timeout=timeout)
class BaseDevice():
    def __init__(self,
                 udid: Optional[str] = None,
                 usbmux: Union[Usbmux, str, None] = None):
        """
        Args:
            udid: device UDID; None means "the single connected device"
            usbmux: a Usbmux instance, a usbmuxd socket address string,
                or None for the default socket
        """
        if not usbmux:
            self._usbmux = Usbmux()
        elif isinstance(usbmux, str):
            self._usbmux = Usbmux(usbmux)
        elif isinstance(usbmux, Usbmux):
            self._usbmux = usbmux
        # NOTE(review): any other usbmux type leaves self._usbmux unset,
        # deferring the failure to a later AttributeError
        self._udid = udid
        self._info: DeviceInfo = None  # cached DeviceInfo, resolved lazily by info
        self._lock = threading.Lock()  # guards pem-file generation in ssl_pemfile_path
        self._pair_record = None  # lockdown pair record, loaded by handshake()
def debug(self) -> bool:
    """Whether debug-level logging is currently enabled.

    NOTE(review): likely was a @property — the decorator appears lost in
    this copy (a sibling ``def debug`` setter follows).
    """
    return logging.getLogger(LOG.root).level == logging.DEBUG
def debug(self, v: bool):
    """Enable or disable debug-level logging.

    NOTE(review): likely was the @debug.setter counterpart of the getter
    above; without decorators this definition shadows it.
    """
    # log setup
    setup_logger(LOG.root,
                 level=logging.DEBUG if v else logging.INFO)
def usbmux(self) -> Usbmux:
    """The Usbmux client used to talk to usbmuxd.

    NOTE(review): likely was a @property — callers elsewhere use
    ``self.usbmux`` without parentheses.
    """
    return self._usbmux
def info(self) -> DeviceInfo:
    """DeviceInfo for this device, resolved and cached on first access.

    Raises:
        MuxError: when no device (or more than one) is connected, or the
            requested udid is not present.

    NOTE(review): likely was a @property — callers use ``self.info``
    without parentheses.
    """
    if self._info:
        return self._info
    devices = self._usbmux.device_list()
    if self._udid:
        for d in devices:
            if d.udid == self._udid:
                self._info = d
                break  # fix: udids are unique, no need to scan the rest
    else:
        if len(devices) == 0:
            raise MuxError("No device connected")
        elif len(devices) > 1:
            # fix: typo in error message ("then" -> "than")
            raise MuxError("More than one device connected")
        _d = devices[0]
        self._udid = _d.udid
        self._info = _d
    if not self._info:
        raise MuxError("Device: {} not ready".format(self._udid))
    return self._info
def is_connected(self) -> bool:
    """Whether this device currently appears in usbmuxd's device list."""
    known_udids = self.usbmux.device_udid_list()
    return self.udid in known_udids
def udid(self) -> str:
    """The device UDID this instance is bound to.

    NOTE(review): likely was a @property — callers use ``self.udid``
    without parentheses.
    """
    serial = self._udid
    return serial
def devid(self) -> int:
    """usbmuxd device id taken from the resolved DeviceInfo."""
    current = self.info
    return current.device_id
def pair_record(self) -> dict:
    """Lockdown pair record dict; performs the handshake lazily when the
    cached record is missing.

    NOTE(review): likely was a @property.
    """
    if not self._pair_record:
        self.handshake()
    return self._pair_record
def pair_record(self, val: Optional[dict]):
    """Replace (or clear, with None) the cached pair record.

    NOTE(review): likely was the @pair_record.setter counterpart.
    """
    self._pair_record = val
def _read_pair_record(self) -> dict:
    """
    Read the stored pair record for this udid from usbmuxd.

    Record keys include:
        DeviceCertificate
        EscrowBag
        HostID
        HostCertificate
        HostPrivateKey
        RootCertificate
        RootPrivateKey
        SystemBUID
        WiFiMACAddress

    Pair data can be found in
        win32: os.environ["ALLUSERSPROFILE"] + "/Apple/Lockdown/"
        darwin: /var/db/lockdown/
        linux: /var/lib/lockdown/

    if ios version > 13.0
        get pair data from usbmuxd
    else:
        generate pair data with python
    """
    payload = {
        'MessageType': 'ReadPairRecord',  # Required
        'PairRecordID': self.udid,  # Required
        'ClientVersionString': 'libusbmuxd 1.1.0',
        'ProgName': PROGRAM_NAME,
        'kLibUSBMuxVersion': 3
    }
    data = self._usbmux.send_recv(payload)
    record_data = data['PairRecordData']
    # the record arrives as a binary plist blob
    return bplist.loads(record_data)
def delete_pair_record(self):
    """Ask usbmuxd to delete the stored pair record for this device."""
    data = self._usbmux.send_recv({
        "MessageType": "DeletePairRecord",
        "PairRecordID": self.udid,
        "ProgName": PROGRAM_NAME,
    })
    # Expect: {'MessageType': 'Result', 'Number': 0}
def pair(self):
    """
    Pair with the device and persist the pair record in usbmuxd.

    Same as idevicepair pair
    iconsole is a github project, hosted in https://github.com/anonymous5l/iConsole

    Returns:
        the freshly created pair record dict

    Raises:
        MuxError, RuntimeError
    """
    device_public_key = self.get_value("DevicePublicKey", no_session=True)
    if not device_public_key:
        raise MuxError("Unable to retrieve DevicePublicKey")
    buid = self._usbmux.read_system_BUID()
    wifi_address = self.get_value("WiFiAddress", no_session=True)
    try:
        from ._ca import make_certs_and_key
    except ImportError:
        #print("DevicePair require pyOpenSSL and pyans1, install by the following command")
        #print("\tpip3 install pyOpenSSL pyasn1", flush=True)
        raise RuntimeError("DevicePair required lib, fix with: pip3 install pyOpenSSL pyasn1")
    cert_pem, priv_key_pem, dev_cert_pem = make_certs_and_key(device_public_key)
    pair_record = {
        'DevicePublicKey': device_public_key,
        'DeviceCertificate': dev_cert_pem,
        'HostCertificate': cert_pem,
        'HostID': str(uuid.uuid4()).upper(),
        'RootCertificate': cert_pem,
        'SystemBUID': buid,
    }
    with self.create_inner_connection() as s:
        ret = s.send_recv_packet({
            "Request": "Pair",
            "PairRecord": pair_record,
            "Label": PROGRAM_NAME,
            "ProtocolVersion": "2",
            "PairingOptions": {
                "ExtendedPairingErrors": True,
            }
        })
        assert ret, "Pair request got empty response"
        if "Error" in ret:
            # error could be "PasswordProtected" or "PairingDialogResponsePending"
            raise MuxError("pair:", ret['Error'])
        assert 'EscrowBag' in ret, ret
        pair_record['HostPrivateKey'] = priv_key_pem
        pair_record['EscrowBag'] = ret['EscrowBag']
        pair_record['WiFiMACAddress'] = wifi_address
    # persist the record so later sessions can reuse it
    self.usbmux.send_recv({
        "MessageType": "SavePairRecord",
        "PairRecordID": self.udid,
        "PairRecordData": bplist.dumps(pair_record),
        "DeviceID": self.devid,
    })
    return pair_record
def handshake(self):
    """
    Load the pair record, pairing with the device if none is stored.

    Sets ``self._pair_record``.

    Raises:
        MuxReplyError: for any usbmuxd failure other than BadDevice
    """
    try:
        self._pair_record = self._read_pair_record()
    except MuxReplyError as err:
        if err.reply_code == UsbmuxReplyCode.BadDevice:
            # no stored record for this udid -> do a fresh pairing
            self._pair_record = self.pair()
        else:
            # fix: previously any other reply error was silently swallowed,
            # leaving self._pair_record as None; propagate it instead
            raise
def ssl_pemfile_path(self):
    """Path to a pem file containing HostPrivateKey + HostCertificate
    from the pair record.

    The file is cached on disk and only rewritten when older than
    3 minutes.  NOTE(review): likely was a @property.
    """
    with self._lock:
        appdir = get_app_dir("ssl")
        fpath = os.path.join(appdir, self._udid + "-" + self._host_id + ".pem")
        if os.path.exists(fpath):
            # do not regenerate the pem file if it is younger than 3 minutes
            st_mtime = datetime.datetime.fromtimestamp(
                os.stat(fpath).st_mtime)
            if datetime.datetime.now() - st_mtime < datetime.timedelta(
                    minutes=3):
                return fpath
        with open(fpath, "wb") as f:
            pdata = self.pair_record
            f.write(pdata['HostPrivateKey'])
            f.write(b"\n")
            f.write(pdata['HostCertificate'])
        return fpath
def _host_id(self):
return self.pair_record['HostID']
def _system_BUID(self):
return self.pair_record['SystemBUID']
def create_inner_connection(
        self,
        port: int = LOCKDOWN_PORT,  # 0xf27e,
        _ssl: bool = False,
        ssl_dial_only: bool = False) -> PlistSocketProxy:
    """
    make connection to iphone inner port

    Args:
        port: device-side TCP port (defaults to lockdownd)
        _ssl: switch the socket to SSL using the pair-record pem file
        ssl_dial_only: perform the SSL handshake, then unwrap back to
            plaintext (needed by some DTX-based services)
    """
    device_id = self.info.device_id
    conn = self._usbmux.connect_device_port(device_id, port)
    if _ssl:
        with set_socket_timeout(conn.get_socket, 10.0):
            psock = conn.psock
            psock.switch_to_ssl(self.ssl_pemfile_path)
            if ssl_dial_only:
                psock.ssl_unwrap()
    return conn
def create_session(self) -> Session:
    """
    create secure connection to lockdown service

    Performs QueryType -> GetValue -> StartSession; re-pairs once on
    InvalidHostID and upgrades the socket to SSL when the device asks.
    """
    s = self.create_inner_connection()
    data = s.send_recv_packet({"Request": "QueryType"})
    assert data['Type'] == LockdownService.MobileLockdown
    data = s.send_recv_packet({
        'Request': 'GetValue',
        'Key': 'ProductVersion',
        'Label': PROGRAM_NAME,
    })
    # Expect: {'Key': 'ProductVersion', 'Request': 'GetValue', 'Value': '13.4.1'}
    data = s.send_recv_packet({
        "Request": "StartSession",
        "HostID": self.pair_record['HostID'],
        "SystemBUID": self.pair_record['SystemBUID'],
        "ProgName": PROGRAM_NAME,
    })
    if 'Error' in data:
        if data['Error'] == 'InvalidHostID':
            # try to repair device
            self.pair_record = None
            self.delete_pair_record()
            self.handshake()
            # After paired, call StartSession again
            data = s.send_recv_packet({
                "Request": "StartSession",
                "HostID": self.pair_record['HostID'],
                "SystemBUID": self.pair_record['SystemBUID'],
                "ProgName": PROGRAM_NAME,
            })
        else:
            raise MuxError("StartSession", data['Error'])
    session_id = data['SessionID']
    if data['EnableSessionSSL']:
        # tempfile.NamedTemporaryFile is not working well on windows
        # See: https://stackoverflow.com/questions/6416782/what-is-namedtemporaryfile-useful-for-on-windows
        s.psock.switch_to_ssl(self.ssl_pemfile_path)
    return Session(s, session_id)
def device_info(self, domain: Optional[str] = None) -> dict:
    """
    Fetch the full GetValue dict, optionally scoped to a domain.

    Args:
        domain: can be found in "ideviceinfo -h", eg: com.apple.disk_usage
    """
    result = self.get_value(domain=domain)
    return result
def get_value(self, key: str = '', domain: str = "", no_session: bool = False):
    """ key can be: ProductVersion

    Args:
        domain (str): com.apple.disk_usage
        no_session: set to True when not paired
    """
    request = {
        "Request": "GetValue",
        "Label": PROGRAM_NAME,
    }
    if key:
        request['Key'] = key
    if domain:
        request['Domain'] = domain
    if no_session:
        with self.create_inner_connection() as s:
            ret = s.send_recv_packet(request)
            # NOTE(review): raises KeyError when 'Value' is absent, unlike
            # the session path below which returns None
            return ret['Value']
    else:
        with self.create_session() as conn:
            ret = conn.send_recv_packet(request)
            return ret.get('Value')
def set_value(self, domain: str, key: str, value: typing.Any):
    """Set a lockdown value inside a secure session.

    Raises:
        ServiceError: when lockdown replies with an Error field
    """
    payload = {
        "Domain": domain,
        "Key": key,
        "Label": "oa",
        "Request": "SetValue",
        "Value": value
    }
    with self.create_session() as sess:
        reply = sess.send_recv_packet(payload)
        failure = reply.get("Error")
        if failure:
            raise ServiceError(failure)
def set_assistive_touch(self, enabled: bool):
    """
    Show or hide the on-screen assistive touch button.

    Raises:
        ServiceError
    """
    self.set_value("com.apple.Accessibility",
                   "AssistiveTouchEnabledByiTunes", enabled)
def screen_info(self) -> ScreenInfo:
    """Screen width/height/scale as reported by the iTunes domain."""
    info = self.device_info("com.apple.mobile.iTunes")
    return ScreenInfo(width=info['ScreenWidth'],
                      height=info['ScreenHeight'],
                      scale=info['ScreenScaleFactor'])
def battery_info(self) -> BatteryInfo:
    """Battery level and charging flags from the mobile.battery domain."""
    info = self.device_info('com.apple.mobile.battery')
    return BatteryInfo(
        level=info['BatteryCurrentCapacity'],
        is_charging=info.get('BatteryIsCharging'),
        external_charge_capable=info.get('ExternalChargeCapable'),
        external_connected=info.get('ExternalConnected'),
        fully_charged=info.get('FullyCharged'),
        gas_gauge_capability=info.get('GasGaugeCapability'),
        has_battery=info.get('HasBattery')
    )
def storage_info(self) -> StorageInfo:
    """ the unit might be 1000 not 1024 """
    usage = self.device_info('com.apple.disk_usage')
    total = usage['TotalDataCapacity']
    available = usage['TotalDataAvailable']
    consumed = total - available
    return StorageInfo(disk_size=usage['TotalDiskCapacity'],
                       used=consumed,
                       free=available)
def reboot(self) -> str:
    """ reboot device

    Returns:
        the 'Status' field of the diagnostics_relay reply
    """
    conn = self.start_service("com.apple.mobile.diagnostics_relay")
    ret = conn.send_recv_packet({
        "Request": "Restart",
        "Label": PROGRAM_NAME,
    })
    return ret['Status']
def shutdown(self):
    """Power the device off via diagnostics_relay.

    Returns:
        the 'Status' field of the reply
    """
    conn = self.start_service("com.apple.mobile.diagnostics_relay")
    ret = conn.send_recv_packet({
        "Request": "Shutdown",
        "Label": PROGRAM_NAME,
    })
    return ret['Status']
def get_io_power(self) -> dict:
    """Shortcut for the IOPMPowerSource IORegistry entry."""
    entry_class = 'IOPMPowerSource'
    return self.get_io_registry(entry_class)
def get_io_registry(self, name: str) -> dict:
    """Query an IORegistry entry class via diagnostics_relay.

    Args:
        name: IORegistry EntryClass, e.g. 'IOPMPowerSource'
    """
    conn = self.start_service("com.apple.mobile.diagnostics_relay")
    ret = conn.send_recv_packet({
        'Request': 'IORegistry',
        'EntryClass': name,
        "Label": PROGRAM_NAME,
    })
    return ret
def get_crashmanager(self) -> CrashManager:
    """
    Move crash logs aside, then return a CrashManager over the copy service.

    https://github.com/libimobiledevice/libimobiledevice/blob/master/tools/idevicecrashreport.c
    """
    # read "ping" message which indicates the crash logs have been moved to a safe harbor
    move_conn = self.start_service(LockdownService.CRASH_REPORT_MOVER_SERVICE)
    ack = b'ping\x00'
    if ack != move_conn.psock.recvall(len(ack)):
        raise ServiceError("ERROR: Crash logs could not be moved. Connection interrupted")
    copy_conn = self.start_service(LockdownService.CRASH_REPORT_COPY_MOBILE_SERVICE)
    return CrashManager(copy_conn)
def enable_ios16_developer_mode(self, reboot_ok: bool = False):
    """
    enabling developer mode on iOS 16

    Returns True when the mode is already enabled; otherwise raises a
    ServiceError describing the next manual step.
    NOTE(review): every not-yet-enabled path below ends in a raise —
    confirm that a fully successful amfi action is expected to be
    handled by the raised messages rather than a normal return.
    """
    is_developer = self.get_value("DeveloperModeStatus", domain="com.apple.security.mac.amfi")
    if is_developer:
        return True
    if reboot_ok:
        if self._send_action_to_amfi_lockdown(action=1) == 0xe6:
            raise ServiceError("Device is rebooting in order to enable \"Developer Mode\"")
    # https://developer.apple.com/documentation/xcode/enabling-developer-mode-on-a-device
    resp_code = self._send_action_to_amfi_lockdown(action=0)
    if resp_code == 0xd9:
        raise ServiceError("Developer Mode is not opened, to enable Developer Mode goto Settings -> Privacy & Security -> Developer Mode")
    else:
        raise ServiceError("Failed to enable \"Developer Mode\"")
def _send_action_to_amfi_lockdown(self, action: int) -> int:
    """
    Send a length-prefixed plist request to the amfi lockdown service.

    Args:
        action:
            0: Show "Developer Mode" Tab in Privacy & Security
            1: Reboot device to dialog of Open "Developer Mode"
               (testing showed this only works when the device has no passcode set)

    Returns:
        the 4-byte big-endian response code
    """
    conn = self.start_service(LockdownService.AmfiLockdown)
    body = plistlib2.dumps({"action": action})
    # frame: 4-byte big-endian length prefix followed by the plist body
    payload = struct.pack(">I", len(body)) + body
    conn.psock.sendall(payload)
    rawdata = conn.psock.recv()
    (resp_code,) = struct.unpack(">I", rawdata[:4])
    return resp_code
def start_service(self, name: str) -> PlistSocketProxy:
    """Start lockdown service *name*, mounting the developer image and
    retrying once when the first attempt fails."""
    try:
        return self._unsafe_start_service(name)
    except (MuxServiceError, MuxError):
        self.mount_developer_image()
        # maybe should wait here
        time.sleep(.5)
        return self._unsafe_start_service(name)
def _unsafe_start_service(self, name: str) -> PlistSocketProxy:
    """Start service *name* without the mount-and-retry fallback.

    Raises:
        MuxServiceError: when lockdown replies with an Error
            (e.g. InvalidService, PasswordProtected)
    """
    with self.create_session() as _s:
        s: PlistSocketProxy = _s
        del(_s)
        data = s.send_recv_packet({
            "Request": "StartService",
            "Service": name,
            "Label": PROGRAM_NAME,
        })
        if 'Error' in data:  # data['Error'] is InvalidService
            error = data['Error']  # PasswordProtected, InvalidService
            raise MuxServiceError(error)
    # Expect recv
    # {'EnableServiceSSL': True,
    #  'Port': 53428,
    #  'Request': 'StartService',
    #  'Service': 'com.apple.xxx'}
    assert data.get('Service') == name
    _ssl = data.get(
        'EnableServiceSSL',
        False)
    # These DTX based services only execute a SSL Handshake
    # and then go back to sending unencrypted data right after the handshake.
    ssl_dial_only = False
    if name in ("com.apple.instruments.remoteserver",
                "com.apple.accessibility.axAuditDaemon.remoteserver",
                "com.apple.testmanagerd.lockdown",
                "com.apple.debugserver"):
        ssl_dial_only = True
    conn = self.create_inner_connection(data['Port'], _ssl=_ssl, ssl_dial_only=ssl_dial_only)
    conn.name = data['Service']
    return conn
def screenshot(self) -> Image.Image:
    """Take a single screenshot by pulling one frame from iter_screenshot()."""
    frames = self.iter_screenshot()
    return next(frames)
def iter_screenshot(self) -> Iterator[Image.Image]:
    """ take screenshot infinite

    Yields one PIL image per next() call; each iteration performs a
    ScreenShotRequest over the screenshotr DeviceLink protocol.
    """
    conn = self.start_service(LockdownService.MobileScreenshotr)
    version_exchange = conn.recv_packet()
    # Expect recv: ['DLMessageVersionExchange', 300, 0]
    data = conn.send_recv_packet([
        'DLMessageVersionExchange', 'DLVersionsOk', version_exchange[1]
    ])
    # Expect recv: ['DLMessageDeviceReady']
    assert data[0] == 'DLMessageDeviceReady'
    while True:
        # code will be blocked here until next(..) called
        data = conn.send_recv_packet([
            'DLMessageProcessMessage', {
                'MessageType': 'ScreenShotRequest'
            }
        ])
        # Expect recv: ['DLMessageProcessMessage', {'MessageType': 'ScreenShotReply', ScreenShotData': b'\x89PNG\r\n\x...'}]
        assert len(data) == 2 and data[0] == 'DLMessageProcessMessage'
        assert isinstance(data[1], dict)
        assert data[1]['MessageType'] == "ScreenShotReply"
        png_data = data[1]['ScreenShotData']
        yield pil_imread(png_data)
def name(self) -> str:
    """Device name (no pairing session required)."""
    value = self.get_value("DeviceName", no_session=True)
    return value
def product_version(self) -> str:
    """iOS version string, e.g. "15.4.1" (no pairing session required)."""
    value = self.get_value("ProductVersion", no_session=True)
    return value
def product_type(self) -> str:
    """Hardware model string, e.g. "iPhone13,2" (no pairing session required)."""
    value = self.get_value("ProductType", no_session=True)
    return value
def app_sync(self, bundle_id: str, command: str = "VendDocuments") -> Sync:
    """AFC access to an app's sandbox via the house_arrest service."""
    # Change command(VendContainer -> VendDocuments)
    # According to https://github.com/GNOME/gvfs/commit/b8ad223b1e2fbe0aec24baeec224a76d91f4ca2f
    # Ref: https://github.com/libimobiledevice/libimobiledevice/issues/193
    conn = self.start_service(LockdownService.MobileHouseArrest)
    conn.send_packet({
        "Command": command,
        "Identifier": bundle_id,
    })
    return Sync(conn)
def installation(self) -> Installation:
    """Installation-proxy service client.

    NOTE(review): likely was a @property — callers use
    ``self.installation`` without parentheses.
    """
    conn = self.start_service(Installation.SERVICE_NAME)
    return Installation(conn)
def imagemounter(self) -> ImageMounter:
    """
    Image-mounter service client.

    start_service will call imagemounter, so here should call
    _unsafe_start_service instead

    NOTE(review): likely was a @property — callers use
    ``self.imagemounter`` without parentheses.
    """
    conn = self._unsafe_start_service(ImageMounter.SERVICE_NAME)
    return ImageMounter(conn)
def _request_developer_image_dir(self, major: int, minor: int) -> typing.Optional[str]:
    """Locate a DeveloperDiskImage directory for iOS *major.minor*,
    returning None when nothing is available."""
    # 1. use local path
    # 2. use download cache resource
    # 3. download from network
    version = str(major) + "." + str(minor)
    if platform.system() == "Darwin":
        mac_developer_dir = f"/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/DeviceSupport/{version}"
        image_path = os.path.join(mac_developer_dir, "DeveloperDiskImage.dmg")
        signature_path = image_path + ".signature"
        if os.path.isfile(image_path) and os.path.isfile(signature_path):
            return mac_developer_dir
    try:
        image_path = get_developer_image_path(version)
        return image_path
    except (DownloadError, DeveloperImageError):
        logger.debug("DeveloperImage not found: %s", version)
        return None
def _test_if_developer_mounted(self) -> bool:
    """Probe whether developer services respond despite a locked device.

    NOTE(review): the probe starts MobileLockdown inside a session —
    confirm this is the intended probe service.
    """
    try:
        with self.create_session():
            self._unsafe_start_service(LockdownService.MobileLockdown)
            return True
    except MuxServiceError:
        return False
def mount_developer_image(self, reboot_ok: bool = False):
    """
    Mount a matching DeveloperDiskImage, trying minor versions downwards.

    Raises:
        MuxError, ServiceError
    """
    product_version = self.get_value("ProductVersion")
    if semver_compare(product_version, "16.0.0") >= 0:
        self.enable_ios16_developer_mode(reboot_ok=reboot_ok)
    try:
        if self.imagemounter.is_developer_mounted():
            logger.info("DeveloperImage already mounted")
            return
    except MuxError:  # expect: DeviceLocked
        pass
    if self._test_if_developer_mounted():
        logger.info("DeviceLocked, but DeveloperImage already mounted")
        return
    major, minor = product_version.split(".")[:2]
    # fall back through older minor versions until one image mounts
    for guess_minor in range(int(minor), -1, -1):
        version = f"{major}.{guess_minor}"
        developer_img_dir = self._request_developer_image_dir(int(major), guess_minor)
        if developer_img_dir:
            image_path = os.path.join(developer_img_dir, "DeveloperDiskImage.dmg")
            signature_path = image_path + ".signature"
            try:
                self.imagemounter.mount(image_path, signature_path)
                logger.info("DeveloperImage %s mounted successfully", version)
                return
            except MuxError as err:
                if "ImageMountFailed" in str(err):
                    logger.info("DeveloperImage %s mount failed, try next version", version)
                else:
                    raise ServiceError("ImageMountFailed")
    raise ServiceError("DeveloperImage not found")
def sync(self) -> Sync:
    """AFC file-service client.

    NOTE(review): likely was a @property.
    """
    conn = self.start_service(LockdownService.AFC)
    return Sync(conn)
def app_stop(self, pid_or_name: Union[int, str]) -> int:
    """
    Kill an app by pid or by bundle id.

    Returns:
        the pid killed, or None when no matching process was found
    """
    with self.connect_instruments() as ts:
        if isinstance(pid_or_name, int):
            ts.app_kill(pid_or_name)
            return pid_or_name
        elif isinstance(pid_or_name, str):
            # resolve bundle id -> pid via the running process list
            bundle_id = pid_or_name
            app_infos = list(self.installation.iter_installed(app_type=None))
            ps = ts.app_process_list(app_infos)
            for p in ps:
                if p['bundle_id'] == bundle_id:
                    ts.app_kill(p['pid'])
                    return p['pid']
    return None
def app_kill(self, *args, **kwargs) -> int:
    """ alias of app_stop """
    delegate = self.app_stop
    return delegate(*args, **kwargs)
def app_start(self,
              bundle_id: str,
              args: Optional[list] = None,
              env: Optional[typing.Mapping] = None) -> int:
    """
    start application

    Args:
        bundle_id: com.apple.Preferences
        args: eg ['-AppleLanguages', '(en)']
        env: eg {'MYPATH': '/tmp'}

    Returns:
        pid
    """
    # fix: the previous defaults [] and {} were shared mutable defaults;
    # None now means "no extra args / env" and is normalized below
    if args is None:
        args = []
    if env is None:
        env = {}
    with self.connect_instruments() as ts:
        return ts.app_launch(bundle_id, args=args, app_env=env)
def app_install(self, file_or_url: Union[str, typing.IO]) -> str:
    """
    Args:
        file_or_url: local path or url

    Returns:
        bundle_id

    Raises:
        ServiceError, IOError

    # Copying 'WebDriverAgentRunner-Runner-resign.ipa' to device... DONE.
    # Installing 'com.facebook.WebDriverAgentRunner.xctrunner'
    #  - CreatingStagingDirectory (5%)
    #  - ExtractingPackage (15%)
    #  - InspectingPackage (20%)
    #  - TakingInstallLock (20%)
    #  - PreflightingApplication (30%)
    #  - InstallingEmbeddedProfile (30%)
    #  - VerifyingApplication (40%)
    #  - CreatingContainer (50%)
    #  - InstallingApplication (60%)
    #  - PostflightingApplication (70%)
    #  - SandboxingApplication (80%)
    #  - GeneratingApplicationMap (90%)
    #  - Complete
    """
    is_url = bool(re.match(r"^https?://", file_or_url))
    if is_url:
        # download the ipa to a temp dir first
        url = file_or_url
        tmpdir = tempfile.TemporaryDirectory()
        filepath = os.path.join(tmpdir.name, "_tmp.ipa")
        logger.info("Download to tmp path: %s", filepath)
        with requests.get(url, stream=True) as r:
            filesize = int(r.headers.get("content-length"))
            preader = ProgressReader(r.raw, filesize)
            with open(filepath, "wb") as f:
                shutil.copyfileobj(preader, f)
            preader.finish()
    elif os.path.isfile(file_or_url):
        filepath = file_or_url
    else:
        raise IOError(
            "Local path {} not exist".format(file_or_url))
    # read bundle id / version from the ipa
    ir = IPAReader(filepath)
    bundle_id = ir.get_bundle_id()
    short_version = ir.get_short_version()
    ir.close()
    # push the ipa into the device's PublicStaging dir over AFC
    conn = self.start_service(LockdownService.AFC)
    afc = Sync(conn)
    ipa_tmp_dir = "PublicStaging"
    if not afc.exists(ipa_tmp_dir):
        afc.mkdir(ipa_tmp_dir)
    print("Copying {!r} to device...".format(filepath), end=" ")
    sys.stdout.flush()
    target_path = ipa_tmp_dir + "/" + bundle_id + ".ipa"
    filesize = os.path.getsize(filepath)
    with open(filepath, 'rb') as f:
        preader = ProgressReader(f, filesize)
        afc.push_content(target_path, preader)
    preader.finish()
    print("DONE.")
    print("Installing {!r} {!r}".format(bundle_id, short_version))
    return self.installation.install(bundle_id, target_path)
def app_uninstall(self, bundle_id: str) -> bool:
    """
    Note: It seems always return True
    """
    inst = self.installation
    return inst.uninstall(bundle_id)
def _connect_testmanagerd_lockdown(self) -> DTXService:
    """DTX connection to testmanagerd (secure variant on iOS >= 14)."""
    if self.major_version() >= 14:
        conn = self.start_service(
            LockdownService.TestmanagerdLockdownSecure)
    else:
        conn = self.start_service(LockdownService.TestmanagerdLockdown)
    return DTXService(conn)
# 2022-08-24 add retry delay, looks like sometime can recover
# BrokenPipeError(ConnectionError)
def connect_instruments(self) -> ServiceInstruments:
    """ start service for instruments (secure variant on iOS >= 14) """
    if self.major_version() >= 14:
        conn = self.start_service(
            LockdownService.InstrumentsRemoteServerSecure)
    else:
        conn = self.start_service(LockdownService.InstrumentsRemoteServer)
    return ServiceInstruments(conn)
def _gen_xctest_configuration(self,
                              app_info: dict,
                              session_identifier: uuid.UUID,
                              target_app_bundle_id: str = None,
                              target_app_env: Optional[dict] = None,
                              target_app_args: Optional[list] = None,
                              tests_to_run: Optional[set] = None) -> bplist.XCTestConfiguration:
    """Build the XCTestConfiguration blob that is pushed into the
    runner app's sandbox before launch."""
    # CFBundleName always endswith -Runner
    exec_name: str = app_info['CFBundleExecutable']
    assert exec_name.endswith("-Runner"), "Invalid CFBundleExecutable: %s" % exec_name
    target_name = exec_name[:-len("-Runner")]
    # xctest_path = f"/tmp/{target_name}-{str(session_identifier).upper()}.xctestconfiguration" # yapf: disable
    return bplist.XCTestConfiguration({
        "testBundleURL": bplist.NSURL(None, f"file://{app_info['Path']}/PlugIns/{target_name}.xctest"),
        "sessionIdentifier": session_identifier,
        "targetApplicationBundleID": target_app_bundle_id,
        "targetApplicationArguments": target_app_args or [],
        "targetApplicationEnvironment": target_app_env or {},
        "testsToRun": tests_to_run or set(),  # We can use "set()" or "None" as default value, but "{}" won't work because the decoding process regards "{}" as a dictionary.
        "testsMustRunOnMainThread": True,
        "reportResultsToIDE": True,
        "reportActivities": True,
        "automationFrameworkPath": "/Developer/Library/PrivateFrameworks/XCTAutomationSupport.framework",
    })  # yapf: disable
def _launch_wda_app(self,
                    bundle_id: str,
                    session_identifier: uuid.UUID,
                    xctest_configuration: bplist.XCTestConfiguration,
                    quit_event: threading.Event = None,
                    test_runner_env: Optional[dict] = None,
                    test_runner_args: Optional[list] = None
                    ) -> typing.Tuple[ServiceInstruments, int]:  # pid
    """Push the xctestconfiguration into the runner's sandbox, launch the
    -Runner app via instruments ProcessControl and attach log callbacks.

    Returns:
        (instruments connection, pid of the launched runner)

    Raises:
        MuxError: when the launch does not return a pid
    """
    app_info = self.installation.lookup(bundle_id)
    sign_identity = app_info.get("SignerIdentity", "")
    logger.info("SignIdentity: %r", sign_identity)
    app_container = app_info['Container']
    # CFBundleName always endswith -Runner
    exec_name = app_info['CFBundleExecutable']
    logger.info("CFBundleExecutable: %s", exec_name)
    assert exec_name.endswith("-Runner"), "Invalid CFBundleExecutable: %s" % exec_name
    target_name = exec_name[:-len("-Runner")]
    xctest_path = f"/tmp/{target_name}-{str(session_identifier).upper()}.xctestconfiguration"  # yapf: disable
    xctest_content = bplist.objc_encode(xctest_configuration)
    fsync = self.app_sync(bundle_id, command="VendContainer")
    # remove stale configurations before pushing the fresh one
    for fname in fsync.listdir("/tmp"):
        if fname.endswith(".xctestconfiguration"):
            logger.debug("remove /tmp/%s", fname)
            fsync.remove("/tmp/" + fname)
    fsync.push_content(xctest_path, xctest_content)
    # service: com.apple.instruments.remoteserver
    conn = self.connect_instruments()
    channel = conn.make_channel(InstrumentsService.ProcessControl)
    conn.call_message(channel, "processIdentifierForBundleIdentifier:", [bundle_id])
    # launch app
    identifier = "launchSuspendedProcessWithDevicePath:bundleIdentifier:environment:arguments:options:"
    app_path = app_info['Path']
    xctestconfiguration_path = app_container + xctest_path  # xctest_path="/tmp/WebDriverAgentRunner-" + str(session_identifier).upper() + ".xctestconfiguration"
    logger.debug("AppPath: %s", app_path)
    logger.debug("AppContainer: %s", app_container)
    app_env = {
        'CA_ASSERT_MAIN_THREAD_TRANSACTIONS': '0',
        'CA_DEBUG_TRANSACTIONS': '0',
        'DYLD_FRAMEWORK_PATH': app_path + '/Frameworks:',
        'DYLD_LIBRARY_PATH': app_path + '/Frameworks',
        'MTC_CRASH_ON_REPORT': '1',
        'NSUnbufferedIO': 'YES',
        'SQLITE_ENABLE_THREAD_ASSERTIONS': '1',
        'WDA_PRODUCT_BUNDLE_IDENTIFIER': '',
        'XCTestBundlePath': f"{app_info['Path']}/PlugIns/{target_name}.xctest",
        'XCTestConfigurationFilePath': xctestconfiguration_path,
        'XCODE_DBG_XPC_EXCLUSIONS': 'com.apple.dt.xctestSymbolicator',
        'MJPEG_SERVER_PORT': '',
        'USE_PORT': '',
        # maybe no needed
        'LLVM_PROFILE_FILE': app_container + "/tmp/%p.profraw",  # %p means pid
    }  # yapf: disable
    if test_runner_env:
        app_env.update(test_runner_env)
    if self.major_version() >= 11:
        app_env['DYLD_INSERT_LIBRARIES'] = '/Developer/usr/lib/libMainThreadChecker.dylib'
        app_env['OS_ACTIVITY_DT_MODE'] = 'YES'
    app_args = [
        '-NSTreatUnknownArgumentsAsOpen', 'NO',
        '-ApplePersistenceIgnoreState', 'YES'
    ]
    app_args.extend(test_runner_args or [])
    app_options = {'StartSuspendedKey': False}
    if self.major_version() >= 12:
        app_options['ActivateSuspended'] = True
    pid = conn.call_message(
        channel, identifier,
        [app_path, bundle_id, app_env, app_args, app_options])
    if not isinstance(pid, int):
        logger.error("Launch failed: %s", pid)
        raise MuxError("Launch failed")
    logger.info("Launch %r pid: %d", bundle_id, pid)
    aux = AUXMessageBuffer()
    aux.append_obj(pid)
    conn.call_message(channel, "startObservingPid:", aux)

    def _callback(m: DTXMessage):
        # handles runner stdout/stderr forwarded over DTX
        # logger.info("output: %s", m.result)
        if m is None:
            logger.warning("WebDriverAgentRunner quitted")
            return
        if m.flags == 0x02:
            method, args = m.result
            if method == 'outputReceived:fromProcess:atTime:':
                # logger.info("Output: %s", args[0].strip())
                logger.debug("logProcess: %s", args[0].rstrip())
                # XCTestOutputBarrier is just ouput separators, no need to
                # print them in the logs.
                if args[0].rstrip() != 'XCTestOutputBarrier':
                    xcuitest_console_logger.debug('%s', args[0].rstrip())
                # In low iOS versions, 'Using singleton test manager' may not be printed... mark wda launch status = True if server url has been printed
                if "ServerURLHere" in args[0]:
                    logger.info("%s", args[0].rstrip())
                    logger.info("WebDriverAgent start successfully")

    def _log_message_callback(m: DTXMessage):
        # forwards console log messages to the xcuitest process logger
        identifier, args = m.result
        logger.debug("logConsole: %s", args)
        if isinstance(args, (tuple, list)):
            for msg in args:
                msg = msg.rstrip() if isinstance(msg, str) else msg
                xcuitest_process_logger.debug('%s', msg)
        else:
            xcuitest_process_logger.debug('%s', args)

    conn.register_callback("_XCT_logDebugMessage:", _log_message_callback)
    conn.register_callback(Event.NOTIFICATION, _callback)
    if quit_event:
        conn.register_callback(Event.FINISHED, lambda _: quit_event.set())
    return conn, pid
def major_version(self) -> int:
    """Major iOS version as an int, e.g. 15 for "15.4.1"."""
    product_version = self.get_value("ProductVersion")
    major, _, _ = product_version.partition(".")
    return int(major)
def _fnmatch_find_bundle_id(self, bundle_id: str) -> str:
    """Resolve a (possibly wildcarded) bundle id pattern against the
    installed apps.

    Raises:
        MuxError: when no installed app matches
    """
    bundle_ids = []
    for binfo in self.installation.iter_installed(
            attrs=['CFBundleIdentifier']):
        if fnmatch.fnmatch(binfo['CFBundleIdentifier'], bundle_id):
            bundle_ids.append(binfo['CFBundleIdentifier'])
    if not bundle_ids:
        raise MuxError("No app matches", bundle_id)
    # use irma first
    bundle_ids.sort(
        key=lambda v: v != 'com.facebook.wda.irmarunner.xctrunner')
    return bundle_ids[0]
def runwda(self, fuzzy_bundle_id="com.*.xctrunner", target_bundle_id=None,
           test_runner_env: Optional[dict] = None,
           test_runner_args: Optional[list] = None,
           target_app_env: Optional[dict] = None,
           target_app_args: Optional[list] = None,
           tests_to_run: Optional[set] = None):
    """ Alias of xcuitest

    Resolves *fuzzy_bundle_id* (an fnmatch pattern) to a concrete
    installed bundle id first, then delegates to xcuitest().
    """
    bundle_id = self._fnmatch_find_bundle_id(fuzzy_bundle_id)
    logger.info("BundleID: %s", bundle_id)
    return self.xcuitest(bundle_id, target_bundle_id=target_bundle_id,
                         test_runner_env=test_runner_env,
                         test_runner_args=test_runner_args,
                         target_app_env=target_app_env,
                         target_app_args=target_app_args,
                         tests_to_run=tests_to_run)
def xcuitest(self, bundle_id, target_bundle_id=None,
test_runner_env: dict={},
test_runner_args: Optional[list]=None,
target_app_env: Optional[dict]=None,
target_app_args: Optional[list]=None,
tests_to_run: Optional[set]=None):
"""
Launch xctrunner and wait until quit
Args:
bundle_id (str): xctrunner bundle id
target_bundle_id (str): optional, launch WDA-UITests will not need it
test_runner_env (dict[str, str]): optional, the environment variables to be passed to the test runner
test_runner_args (list[str]): optional, the command line arguments to be passed to the test runner
target_app_env (dict[str, str]): optional, the environmen variables to be passed to the target app
target_app_args (list[str]): optional, the command line arguments to be passed to the target app
tests_to_run (set[str]): optional, the specific test classes or test methods to run
"""
product_version = self.get_value("ProductVersion")
logger.info("ProductVersion: %s", product_version)
logger.info("UDID: %s", self.udid)
XCODE_VERSION = 29
session_identifier = uuid.uuid4()
# when connections closes, this event will be set
quit_event = threading.Event()
##
## IDE 1st connection
x1 = self._connect_testmanagerd_lockdown()
# index: 427
x1_daemon_chan = x1.make_channel(
'dtxproxy:XCTestManager_IDEInterface:XCTestManager_DaemonConnectionInterface'
)
if self.major_version() >= 11:
identifier = '_IDE_initiateControlSessionWithProtocolVersion:'
aux = AUXMessageBuffer()
aux.append_obj(XCODE_VERSION)
x1.call_message(x1_daemon_chan, identifier, aux)
x1.register_callback(Event.FINISHED, lambda _: quit_event.set())
##
## IDE 2nd connection
x2 = self._connect_testmanagerd_lockdown()
x2_deamon_chan = x2.make_channel(
'dtxproxy:XCTestManager_IDEInterface:XCTestManager_DaemonConnectionInterface'
)
x2.register_callback(Event.FINISHED, lambda _: quit_event.set())
#x2.register_callback("pidDiedCallback:" # maybe no needed
_start_flag = threading.Event()
def _start_executing(m: Optional[DTXMessage] = None):
if _start_flag.is_set():
return
_start_flag.set()
logger.info("Start execute test plan with IDE version: %d",
XCODE_VERSION)
x2.call_message(0xFFFFFFFF, '_IDE_startExecutingTestPlanWithProtocolVersion:', [XCODE_VERSION], expects_reply=False)
def _show_log_message(m: DTXMessage):
logger.debug("logMessage: %s", m.result[1])
if 'Received test runner ready reply' in ''.join(
m.result[1]):
logger.info("Test runner ready detected")
_start_executing()
if isinstance(m.result[1], (tuple, list)):
for msg in m.result[1]:
msg = msg.rstrip() if isinstance(msg, str) else msg
xcuitest_process_logger.debug('%s', msg)
else:
xcuitest_process_logger.debug('%s', m.result[1])
test_results = []
test_results_lock = threading.Lock()
def _record_test_result_callback(m: DTXMessage):
result = None
if isinstance(m.result, (tuple, list)) and len(m.result) >= 1:
if isinstance(m.result[1], (tuple, list)):
try:
result = XCTestResult(*m.result[1])
except TypeError:
pass
if not result:
logger.warning('Ignore unknown test result message: %s', m)
return
with test_results_lock:
test_results.append(result)
x2.register_callback(
'_XCT_testBundleReadyWithProtocolVersion:minimumVersion:',
_start_executing) # This only happends <= iOS 13
x2.register_callback('_XCT_logDebugMessage:', _show_log_message)
x2.register_callback(
"_XCT_testSuite:didFinishAt:runCount:withFailures:unexpected:testDuration:totalDuration:",
_record_test_result_callback)
app_info = self.installation.lookup(bundle_id)
xctest_configuration = self._gen_xctest_configuration(app_info, session_identifier, target_bundle_id, target_app_env, target_app_args, tests_to_run)
def _ready_with_caps_callback(m: DTXMessage):
x2.send_dtx_message(m.channel_id,
payload=DTXPayload.build_other(0x03, xctest_configuration),
message_id=m.message_id)
x2.register_callback('_XCT_testRunnerReadyWithCapabilities:', _ready_with_caps_callback)
# index: 469
identifier = '_IDE_initiateSessionWithIdentifier:forClient:atPath:protocolVersion:'
aux = AUXMessageBuffer()
aux.append_obj(session_identifier)
aux.append_obj(str(session_identifier) + '-6722-000247F15966B083')
aux.append_obj(
'/Applications/Xcode.app/Contents/Developer/usr/bin/xcodebuild')
aux.append_obj(XCODE_VERSION)
result = x2.call_message(x2_deamon_chan, identifier, aux)
if "NSError" in str(result):
raise RuntimeError("Xcode Invocation Failed: {}".format(result))
# launch test app
# index: 1540
xclogger = setup_logger(name='xcuitest')
_, pid = self._launch_wda_app(
bundle_id,
session_identifier,
xctest_configuration=xctest_configuration,
test_runner_env=test_runner_env,
test_runner_args=test_runner_args)
# xcode call the following commented method, twice
# but it seems can be ignored
# identifier = '_IDE_collectNewCrashReportsInDirectories:matchingProcessNames:'
# aux = AUXMessageBuffer()
# aux.append_obj(['/var/mobile/Library/Logs/CrashReporter/'])
# aux.append_obj(['SpringBoard', 'backboardd', 'xctest'])
# result = x1.call_message(chan, identifier, aux)
# logger.debug("result: %s", result)
# identifier = '_IDE_collectNewCrashReportsInDirectories:matchingProcessNames:'
# aux = AUXMessageBuffer()
# aux.append_obj(['/var/mobile/Library/Logs/CrashReporter/'])
# aux.append_obj(['SpringBoard', 'backboardd', 'xctest'])
# result = x1.call_message(chan, identifier, aux)
# logger.debug("result: %s", result)
    # after the app is launched, the operations below must be sent within 0.1s
    # or WDA will fail to launch
if self.major_version() >= 12:
identifier = '_IDE_authorizeTestSessionWithProcessID:'
aux = AUXMessageBuffer()
aux.append_obj(pid)
result = x1.call_message(x1_daemon_chan, identifier, aux)
elif self.major_version() <= 9:
identifier = '_IDE_initiateControlSessionForTestProcessID:'
aux = AUXMessageBuffer()
aux.append_obj(pid)
result = x1.call_message(x1_daemon_chan, identifier, aux)
else:
identifier = '_IDE_initiateControlSessionForTestProcessID:protocolVersion:'
aux = AUXMessageBuffer()
aux.append_obj(pid)
aux.append_obj(XCODE_VERSION)
result = x1.call_message(x1_daemon_chan, identifier, aux)
if "NSError" in str(result):
raise RuntimeError("Xcode Invocation Failed: {}".format(result))
# wait for quit
# on windows threading.Event.wait can't handle ctrl-c
while not quit_event.wait(.1):
pass
test_result_str = "\n".join(map(str, test_results))
if any(result.failure_count > 0 for result in test_results):
raise RuntimeError(
"Xcode test failed on device with test results:\n"
f"{test_result_str}"
)
logger.info("xctrunner quited with result:\n%s", test_result_str)
def iter_gpu(d: BaseDevice) -> Iterator[Any]:
    """Yield GPU utilization samples from the instruments service.

    Each item is a ``(DataType.GPU, payload)`` tuple; the payload carries
    the device/renderer/tiler utilization percentages, a sample time and
    a "value" alias for the device utilization.
    """
    with d.connect_instruments() as ts:
        for sample in ts.iter_opengl_data():
            device = sample['Device Utilization %']
            tiler = sample['Tiler Utilization %']
            renderer = sample['Renderer Utilization %']
            payload = {
                "device": device,
                "renderer": renderer,
                "tiler": tiler,
                "time": time.time(),
                "value": device,
            }
            yield DataType.GPU, payload
154,739 | import base64
import enum
import io
import threading
import time
import typing
import uuid
from collections import defaultdict, namedtuple
from typing import Any, Iterator, Optional, Tuple, Union
import weakref
from ._device import BaseDevice
from ._proto import *
class DataType(str, enum.Enum):
def __init__(self, d: BaseDevice, perfs: typing.List[DataType] = []):
def start(self, bundle_id: str, callback: CallbackType = None):
def _thread_start(self, callback: CallbackType):
def stop(self):
def wait(self, timeout: float):
class BaseDevice():
def __init__(self,
udid: Optional[str] = None,
usbmux: Union[Usbmux, str, None] = None):
def debug(self) -> bool:
def debug(self, v: bool):
def usbmux(self) -> Usbmux:
def info(self) -> DeviceInfo:
def is_connected(self) -> bool:
def udid(self) -> str:
def devid(self) -> int:
def pair_record(self) -> dict:
def pair_record(self, val: Optional[dict]):
def _read_pair_record(self) -> dict:
def delete_pair_record(self):
def pair(self):
def handshake(self):
def ssl_pemfile_path(self):
def _host_id(self):
def _system_BUID(self):
def create_inner_connection(
self,
port: int = LOCKDOWN_PORT, # 0xf27e,
_ssl: bool = False,
ssl_dial_only: bool = False) -> PlistSocketProxy:
def create_session(self) -> Session:
def device_info(self, domain: Optional[str] = None) -> dict:
def get_value(self, key: str = '', domain: str = "", no_session: bool = False):
def set_value(self, domain: str, key: str, value: typing.Any):
def set_assistive_touch(self, enabled: bool):
def screen_info(self) -> ScreenInfo:
def battery_info(self) -> BatteryInfo:
def storage_info(self) -> StorageInfo:
def reboot(self) -> str:
def shutdown(self):
def get_io_power(self) -> dict:
def get_io_registry(self, name: str) -> dict:
def get_crashmanager(self) -> CrashManager:
def enable_ios16_developer_mode(self, reboot_ok: bool = False):
def _send_action_to_amfi_lockdown(self, action: int) -> int:
def start_service(self, name: str) -> PlistSocketProxy:
def _unsafe_start_service(self, name: str) -> PlistSocketProxy:
def screenshot(self) -> Image.Image:
def iter_screenshot(self) -> Iterator[Image.Image]:
def name(self) -> str:
def product_version(self) -> str:
def product_type(self) -> str:
def app_sync(self, bundle_id: str, command: str = "VendDocuments") -> Sync:
def installation(self) -> Installation:
def imagemounter(self) -> ImageMounter:
def _request_developer_image_dir(self, major: int, minor: int) -> typing.Optional[str]:
def _test_if_developer_mounted(self) -> bool:
def mount_developer_image(self, reboot_ok: bool = False):
def sync(self) -> Sync:
def app_stop(self, pid_or_name: Union[int, str]) -> int:
def app_kill(self, *args, **kwargs) -> int:
def app_start(self,
bundle_id: str,
args: Optional[list] = [],
env: typing.Mapping = {}) -> int:
def app_install(self, file_or_url: Union[str, typing.IO]) -> str:
def app_uninstall(self, bundle_id: str) -> bool:
def _connect_testmanagerd_lockdown(self) -> DTXService:
def connect_instruments(self) -> ServiceInstruments:
def _gen_xctest_configuration(self,
app_info: dict,
session_identifier: uuid.UUID,
target_app_bundle_id: str = None,
target_app_env: Optional[dict] = None,
target_app_args: Optional[list] = None,
tests_to_run: Optional[set] = None) -> bplist.XCTestConfiguration:
def _launch_wda_app(self,
bundle_id: str,
session_identifier: uuid.UUID,
xctest_configuration: bplist.XCTestConfiguration,
quit_event: threading.Event = None,
test_runner_env: Optional[dict] = None,
test_runner_args: Optional[list] = None
) -> typing.Tuple[ServiceInstruments, int]:
def _callback(m: DTXMessage):
def _log_message_callback(m: DTXMessage):
def major_version(self) -> int:
def _fnmatch_find_bundle_id(self, bundle_id: str) -> str:
def runwda(self, fuzzy_bundle_id="com.*.xctrunner", target_bundle_id=None,
test_runner_env: Optional[dict]=None,
test_runner_args: Optional[list]=None,
target_app_env: Optional[dict]=None,
target_app_args: Optional[list]=None,
tests_to_run: Optional[set]=None):
def xcuitest(self, bundle_id, target_bundle_id=None,
test_runner_env: dict={},
test_runner_args: Optional[list]=None,
target_app_env: Optional[dict]=None,
target_app_args: Optional[list]=None,
tests_to_run: Optional[set]=None):
def _start_executing(m: Optional[DTXMessage] = None):
def _show_log_message(m: DTXMessage):
def _record_test_result_callback(m: DTXMessage):
def _ready_with_caps_callback(m: DTXMessage):
def iter_screenshot(d: BaseDevice) -> Iterator[Tuple[DataType, dict]]:
    """Yield screenshots as ``(DataType.SCREENSHOT, payload)`` tuples.

    The payload holds the capture time, the (downscaled) PIL image and a
    base64-encoded JPEG thumbnail for easy serialization.
    """
    for img in d.iter_screenshot():
        captured_at = time.time()
        # Shrink the image so the payload stays small and easy to store.
        img.thumbnail((200, 200))
        buf = io.BytesIO()
        img.save(buf, format="JPEG")
        encoded = base64.b64encode(buf.getvalue()).decode('utf-8')
        yield DataType.SCREENSHOT, {
            "time": captured_at,
            "value": img,
            "img_base64": encoded,
            "type": "screenshot",
        }
154,740 | import base64
import enum
import io
import threading
import time
import typing
import uuid
from collections import defaultdict, namedtuple
from typing import Any, Iterator, Optional, Tuple, Union
import weakref
from ._device import BaseDevice
from ._proto import *
def __init__(self, d: BaseDevice, perfs: typing.Optional[typing.List[DataType]] = None):
    """Collect performance samples for an app running on *d*.

    Args:
        d: device to sample from
        perfs: data types to collect; defaults to no types selected
    """
    self._d = d
    self._bundle_id = None
    self._stop_event = threading.Event()
    self._wg = WaitGroup()
    self._started = False
    self._result = defaultdict(list)
    # BUGFIX: the previous default `perfs=[]` was a mutable default
    # argument shared across every instance; use a fresh list instead.
    self._perfs = perfs if perfs is not None else []
    # the callback function accepts all the data
    self._callback = None
def start(self, bundle_id: str, callback: CallbackType = None):
    """Begin sampling the app identified by *bundle_id*.

    When no callback is supplied, a default one prints every selected
    sample except screenshots (not decided yet how those should be shown).
    """
    if not callback:
        def callback(_type, data):
            # Default sink: print everything selected except screenshots.
            if _type != DataType.SCREENSHOT and _type in self._perfs:
                print(_type.value, data, flush=True)
    self._rp = RunningProcess(self._d, bundle_id)
    self._thread_start(callback)
def _thread_start(self, callback: CallbackType):
    """Spawn one daemon worker thread per selected data source."""
    wanted = self._perfs
    sources = []
    if DataType.CPU in wanted or DataType.MEMORY in wanted:
        sources.append(iter_cpu_memory(self._d, self._rp))
    if DataType.FPS in wanted:
        sources.append(iter_fps(self._d))
    if DataType.GPU in wanted:
        sources.append(iter_gpu(self._d))
    if DataType.SCREENSHOT in wanted:
        sources.append(set_interval(iter_screenshot(self._d), 2.0))
    if DataType.NETWORK in wanted:
        sources.append(iter_network_flow(self._d, self._rp))
    for source in sources:
        self._wg.add(1)
        worker = threading.Thread(
            name="perf",
            target=append_data,
            args=(self._wg, self._stop_event, source, callback, self._perfs),
            daemon=True)
        worker.start()
def stop(self): # -> PerfReport:
    """Signal all sampling threads to stop, then ask the instruments
    service to stop the server-side iterators that were started."""
    self._stop_event.set()
    with self._d.connect_instruments() as ts:
        print('Stop Sampling...')
        # Only stop the server-side iterators matching what we started.
        if DataType.NETWORK in self._perfs: ts.stop_network_iter()
        if DataType.GPU in self._perfs or DataType.FPS in self._perfs: ts.stop_iter_opengl_data()
        if DataType.CPU in self._perfs or DataType.MEMORY in self._perfs: ts.stop_iter_cpu_memory()
        print("\nFinished!")
    # memory and fps will take at least 1 second to notice _stop_event;
    # to keep this function fast we deliberately do not wait here:
    # > self._wg.wait(timeout=3.0) # wait all stopped
    # > self._started = False
def wait(self, timeout: float):
    """Block until every sampling thread finishes or *timeout* elapses."""
    return self._wg.wait(timeout=timeout)
def set_interval(it: Iterator[Any], interval: float):
    """Re-yield items from *it*, spacing consecutive items at least
    *interval* seconds apart (time spent producing an item counts toward
    the interval).

    Args:
        it: source iterator
        interval: minimum number of seconds between yielded items

    Yields:
        the items of *it*, unchanged
    """
    while True:
        started = time.time()
        try:
            item = next(it)
        except StopIteration:
            # BUGFIX (PEP 479): letting StopIteration escape a generator
            # raises RuntimeError on Python 3.7+; end the generator cleanly
            # when the source iterator is exhausted.
            return
        yield item
        delay = max(0.0, interval - (time.time() - started))
        time.sleep(delay)
154,741 | import base64
import enum
import io
import threading
import time
import typing
import uuid
from collections import defaultdict, namedtuple
from typing import Any, Iterator, Optional, Tuple, Union
import weakref
from ._device import BaseDevice
from ._proto import *
class DataType(str, enum.Enum):
class RunningProcess:
def __init__(self, d: BaseDevice, bundle_id: str):
def bundle_id(self) -> str:
def get_pid(self) -> Union[int, None]:
def gen_stimestamp(seconds: Optional[float] = None) -> str:
class BaseDevice():
def __init__(self,
udid: Optional[str] = None,
usbmux: Union[Usbmux, str, None] = None):
def debug(self) -> bool:
def debug(self, v: bool):
def usbmux(self) -> Usbmux:
def info(self) -> DeviceInfo:
def is_connected(self) -> bool:
def udid(self) -> str:
def devid(self) -> int:
def pair_record(self) -> dict:
def pair_record(self, val: Optional[dict]):
def _read_pair_record(self) -> dict:
def delete_pair_record(self):
def pair(self):
def handshake(self):
def ssl_pemfile_path(self):
def _host_id(self):
def _system_BUID(self):
def create_inner_connection(
self,
port: int = LOCKDOWN_PORT, # 0xf27e,
_ssl: bool = False,
ssl_dial_only: bool = False) -> PlistSocketProxy:
def create_session(self) -> Session:
def device_info(self, domain: Optional[str] = None) -> dict:
def get_value(self, key: str = '', domain: str = "", no_session: bool = False):
def set_value(self, domain: str, key: str, value: typing.Any):
def set_assistive_touch(self, enabled: bool):
def screen_info(self) -> ScreenInfo:
def battery_info(self) -> BatteryInfo:
def storage_info(self) -> StorageInfo:
def reboot(self) -> str:
def shutdown(self):
def get_io_power(self) -> dict:
def get_io_registry(self, name: str) -> dict:
def get_crashmanager(self) -> CrashManager:
def enable_ios16_developer_mode(self, reboot_ok: bool = False):
def _send_action_to_amfi_lockdown(self, action: int) -> int:
def start_service(self, name: str) -> PlistSocketProxy:
def _unsafe_start_service(self, name: str) -> PlistSocketProxy:
def screenshot(self) -> Image.Image:
def iter_screenshot(self) -> Iterator[Image.Image]:
def name(self) -> str:
def product_version(self) -> str:
def product_type(self) -> str:
def app_sync(self, bundle_id: str, command: str = "VendDocuments") -> Sync:
def installation(self) -> Installation:
def imagemounter(self) -> ImageMounter:
def _request_developer_image_dir(self, major: int, minor: int) -> typing.Optional[str]:
def _test_if_developer_mounted(self) -> bool:
def mount_developer_image(self, reboot_ok: bool = False):
def sync(self) -> Sync:
def app_stop(self, pid_or_name: Union[int, str]) -> int:
def app_kill(self, *args, **kwargs) -> int:
def app_start(self,
bundle_id: str,
args: Optional[list] = [],
env: typing.Mapping = {}) -> int:
def app_install(self, file_or_url: Union[str, typing.IO]) -> str:
def app_uninstall(self, bundle_id: str) -> bool:
def _connect_testmanagerd_lockdown(self) -> DTXService:
def connect_instruments(self) -> ServiceInstruments:
def _gen_xctest_configuration(self,
app_info: dict,
session_identifier: uuid.UUID,
target_app_bundle_id: str = None,
target_app_env: Optional[dict] = None,
target_app_args: Optional[list] = None,
tests_to_run: Optional[set] = None) -> bplist.XCTestConfiguration:
def _launch_wda_app(self,
bundle_id: str,
session_identifier: uuid.UUID,
xctest_configuration: bplist.XCTestConfiguration,
quit_event: threading.Event = None,
test_runner_env: Optional[dict] = None,
test_runner_args: Optional[list] = None
) -> typing.Tuple[ServiceInstruments, int]:
def _callback(m: DTXMessage):
def _log_message_callback(m: DTXMessage):
def major_version(self) -> int:
def _fnmatch_find_bundle_id(self, bundle_id: str) -> str:
def runwda(self, fuzzy_bundle_id="com.*.xctrunner", target_bundle_id=None,
test_runner_env: Optional[dict]=None,
test_runner_args: Optional[list]=None,
target_app_env: Optional[dict]=None,
target_app_args: Optional[list]=None,
tests_to_run: Optional[set]=None):
def xcuitest(self, bundle_id, target_bundle_id=None,
test_runner_env: dict={},
test_runner_args: Optional[list]=None,
target_app_env: Optional[dict]=None,
target_app_args: Optional[list]=None,
tests_to_run: Optional[set]=None):
def _start_executing(m: Optional[DTXMessage] = None):
def _show_log_message(m: DTXMessage):
def _record_test_result_callback(m: DTXMessage):
def _ready_with_caps_callback(m: DTXMessage):
def iter_network_flow(d: BaseDevice, rp: RunningProcess) -> Iterator[Any]:
    """Yield traffic statistics as ``(DataType.NETWORK, stats)`` tuples.

    Each stats dict comes straight from the instruments service, with a
    string timestamp added under the "timestamp" key.
    """
    with d.connect_instruments() as ts:
        for stats in ts.iter_network():
            stats['timestamp'] = gen_stimestamp()
            yield DataType.NETWORK, stats
154,742 | import base64
import enum
import io
import threading
import time
import typing
import uuid
from collections import defaultdict, namedtuple
from typing import Any, Iterator, Optional, Tuple, Union
import weakref
from ._device import BaseDevice
from ._proto import *
class DataType(str, enum.Enum):
CallbackType = typing.Callable[[DataType, dict], None]
class WaitGroup(object):
def __init__(self):
def add(self, n):
def done(self):
def wait(self, timeout: Optional[float] = None):
def gen_stimestamp(seconds: Optional[float] = None) -> str:
def append_data(wg: WaitGroup, stop_event: threading.Event,
                idata: Iterator[Any], callback: CallbackType, filters: list):
    """Drain *idata*, forwarding each sample of a type in *filters* to
    *callback*.

    Runs until the source is exhausted or *stop_event* is set.  On exit the
    event is set so sibling workers stop too: when one source breaks,
    everything stops, which keeps the error visible.
    """
    for _type, data in idata:
        assert isinstance(data, dict)
        assert isinstance(_type, DataType)
        if stop_event.is_set():
            wg.done()
            break
        # Normalize a float "time" field into a string "timestamp" field.
        if isinstance(data, dict) and "time" in data:
            data["timestamp"] = gen_stimestamp(data.pop("time"))
        if _type in filters:
            callback(_type, data)
    stop_event.set()
154,743 | import copy
import uuid
import pprint
import datetime
from typing import Any, Union, List
from .plistlib2 import (InvalidFileException,
load, dump, loads, dumps,
FMT_BINARY, FMT_XML, UID)
class NSNull(NSBaseObject):
    """
    NSNull() always returns the same (singleton) instance.
    """
    # Class-level slot holding the shared singleton instance.
    _instance = None
    def __new__(cls):
        # Lazily create the single shared instance on first call.
        if not NSNull._instance:
            NSNull._instance = super().__new__(cls)
        return NSNull._instance
    def __bool__(self):
        # NSNull is falsy, mirroring how a null placeholder behaves.
        return False
    def encode(objects: list, value: Union[int, str]):
        # NOTE(review): no `self` parameter -- presumably a @staticmethod in
        # the original source; confirm before calling through an instance.
        # Appends this object's archive record to *objects* and links it to
        # its class description via a UID back-reference.
        ns_info = {}
        objects.append(ns_info)
        ns_info['$class'] = UID(len(objects))
        objects.append({
            "$classname": "NSNull",
            "$classes": ["NSNull", "NSObject"],
        })
class NSURL(NSBaseObject):
    """Archived NSURL value: a (base, relative) pair as stored by
    NSKeyedArchiver."""
    def __init__(self, base, relative):
        self._base = base
        self._relative = relative
    def __eq__(self, other) -> bool:
        # Equal when both the base and the relative components match.
        # NOTE(review): __eq__ without __hash__ makes instances unhashable.
        return self._base == other._base and self._relative == other._relative
    def __str__(self):
        return "NSURL({}, {})".format(self._base, self._relative)
    def __repr__(self):
        return self.__str__()
    def encode(objects: list, value):
        # NOTE(review): no `self` parameter -- presumably a @staticmethod in
        # the original source; confirm before calling through an instance.
        # Archives *value* into *objects*, encoding base and relative parts
        # and linking to the NSURL class description via a UID.
        ns_info = {}
        objects.append(ns_info)
        ns_info['NS.base'] = _encode_any(objects, value._base)
        ns_info['NS.relative'] = _encode_any(objects, value._relative)
        ns_info['$class'] = UID(len(objects))
        objects.append({
            '$classes': ['NSURL', 'NSObject'],
            '$classname': 'NSURL'
        })
    def decode(objects: list, ns_info: dict):
        # NOTE(review): also missing `self` -- likely @staticmethod upstream.
        # Rebuilds an NSURL from its archived record.
        base = _parse_object(objects, ns_info['NS.base'])
        relative = _parse_object(objects, ns_info['NS.relative'])
        return NSURL(base, relative)
def objc_encode(value: Any) -> bytes:
    """Archive *value* into NSKeyedArchiver binary-plist bytes."""
    archived = ['$null']
    _encode_any(archived, value)
    plist = {
        "$version": 100000,
        "$archiver": "NSKeyedArchiver",
        "$top": {"root": UID(1)},
        "$objects": archived,
    }
    return dumps(plist, fmt=FMT_BINARY)
def objc_decode(data: Union[bytes, dict]) -> Any:
    """Unarchive an NSKeyedArchiver payload (raw bytes or parsed plist dict).

    Raises:
        InvalidNSKeyedArchiverFormat: when the payload is not an
            NSKeyedArchiver archive.
    """
    if isinstance(data, (bytes, bytearray)):
        data = loads(data)
    is_archive = (isinstance(data, dict)
                  and data.get('$archiver') == 'NSKeyedArchiver')
    if not is_archive:
        raise InvalidNSKeyedArchiverFormat()
    assert data['$version'] == 100000
    objects = data["$objects"]
    root = data["$top"]['root'].data
    return _parse_object(objects, root)
def loads(value, *, fmt=None, use_builtin_types=True, dict_type=dict):
    """Read a .plist from a bytes object and return the unpacked root
    object (usually a dictionary)."""
    stream = BytesIO(value)
    return load(stream,
                fmt=fmt,
                use_builtin_types=use_builtin_types,
                dict_type=dict_type)
def test_objc_encode_decode():
    """Round-trip check: objc_encode followed by objc_decode must
    reproduce the original value for each supported type below."""
    # yapf: disable
    for value in (
            "hello world",
            {"hello": "world"}, [1, 2, 3],
            {"hello": [1, 2, 3]},
            set([1, 2, 3]),
            {"hello": set([1, 2, 3])},
            uuid.uuid4(),
            NSNull(),
            NSURL(None, "file://abce"),
            {"none-type": None},
            {"hello": {"level2": "hello"}},
            {"hello": {
                "level2": "hello",
                "uuid": uuid.uuid4(),
                "level3": [1, 2, 3],
                "ns-uuid-null": [uuid.uuid4(), NSNull()]}},
            # set([1, {"a": 2}, 3]), # not supported, since dict is not hashable
    ):
        bdata = objc_encode(value)
        try:
            pdata = objc_decode(bdata)
            print("TEST: {:20s}".format(str(value)), end="\t")
            assert pdata == value
            print("[OK]")
        except Exception as e:
            # On failure, dump the raw archive for inspection, then re-raise.
            print("Value:", value)
            pprint.pprint(loads(bdata))
            raise
        # data = loads(bdata)
        # pdata = objc_decode(data)
        # assert pdata == value
    # yapf: enable
    # TODO
    # NSDate decode
154,744 | import binascii
import codecs
import contextlib
import datetime
import enum
from io import BytesIO
import itertools
import os
import re
import struct
from warnings import warn
from xml.parsers.expat import ParserCreate
def _maybe_open(pathOrFile, mode):
if isinstance(pathOrFile, str):
with open(pathOrFile, mode) as fp:
yield fp
else:
yield pathOrFile
def load(fp, *, fmt=None, use_builtin_types=True, dict_type=dict):
    """Read a .plist from readable binary file object *fp* and return the
    unpacked root object (usually a dictionary).

    Raises:
        InvalidFileException: when *fmt* is None and no known format
            matches the stream header.
    """
    if fmt is not None:
        parser_cls = _FORMATS[fmt]['parser']
    else:
        # Sniff the format from the first bytes of the stream.
        header = fp.read(32)
        fp.seek(0)
        for info in _FORMATS.values():
            if info['detect'](header):
                parser_cls = info['parser']
                break
        else:
            raise InvalidFileException()
    parser = parser_cls(use_builtin_types=use_builtin_types,
                        dict_type=dict_type)
    return parser.parse(fp)
The provided code snippet includes necessary dependencies for implementing the `readPlist` function. Write a Python function `def readPlist(pathOrFile)` to solve the following problem:
Read a .plist from a path or file. pathOrFile should either be a file name, or a readable binary file object. This function is deprecated, use load instead.
Here is the function:
def readPlist(pathOrFile):
    """Read a .plist from a file name or a readable binary file object.

    Deprecated: use load() instead.
    """
    warn("The readPlist function is deprecated, use load() instead",
         DeprecationWarning, stacklevel=2)
    with _maybe_open(pathOrFile, 'rb') as fp:
        root = load(fp, fmt=None, use_builtin_types=False)
    return root
154,745 | import binascii
import codecs
import contextlib
import datetime
import enum
from io import BytesIO
import itertools
import os
import re
import struct
from warnings import warn
from xml.parsers.expat import ParserCreate
def _maybe_open(pathOrFile, mode):
if isinstance(pathOrFile, str):
with open(pathOrFile, mode) as fp:
yield fp
else:
yield pathOrFile
def dump(value, fp, *, fmt=FMT_XML, sort_keys=True, skipkeys=False):
    """Serialize *value* into *fp* (a writable binary file object) as a
    .plist in the requested format.

    Raises:
        ValueError: when *fmt* is not a known plist format.
    """
    if fmt not in _FORMATS:
        raise ValueError("Unsupported format: %r"%(fmt,))
    writer_factory = _FORMATS[fmt]["writer"]
    writer = writer_factory(fp, sort_keys=sort_keys, skipkeys=skipkeys)
    writer.write(value)
The provided code snippet includes necessary dependencies for implementing the `writePlist` function. Write a Python function `def writePlist(value, pathOrFile)` to solve the following problem:
Write 'value' to a .plist file. 'pathOrFile' may either be a file name or a (writable) file object. This function is deprecated, use dump instead.
Here is the function:
def writePlist(value, pathOrFile):
    """Write *value* as an XML plist to a file name or a writable file
    object.

    Deprecated: use dump() instead.
    """
    warn("The writePlist function is deprecated, use dump() instead",
         DeprecationWarning, stacklevel=2)
    with _maybe_open(pathOrFile, 'wb') as fp:
        dump(value, fp, fmt=FMT_XML, sort_keys=True, skipkeys=False)
154,746 | import binascii
import codecs
import contextlib
import datetime
import enum
from io import BytesIO
import itertools
import os
import re
import struct
from warnings import warn
from xml.parsers.expat import ParserCreate
def load(fp, *, fmt=None, use_builtin_types=True, dict_type=dict):
"""Read a .plist file. 'fp' should be a readable and binary file object.
Return the unpacked root object (which usually is a dictionary).
"""
if fmt is None:
header = fp.read(32)
fp.seek(0)
for info in _FORMATS.values():
if info['detect'](header):
P = info['parser']
break
else:
raise InvalidFileException()
else:
P = _FORMATS[fmt]['parser']
p = P(use_builtin_types=use_builtin_types, dict_type=dict_type)
return p.parse(fp)
The provided code snippet includes necessary dependencies for implementing the `readPlistFromBytes` function. Write a Python function `def readPlistFromBytes(data)` to solve the following problem:
Read a plist data from a bytes object. Return the root object. This function is deprecated, use loads instead.
Here is the function:
def readPlistFromBytes(data):
    """Parse plist *data* (a bytes object) and return the root object.

    Deprecated: use loads() instead.
    """
    warn("The readPlistFromBytes function is deprecated, use loads() instead",
         DeprecationWarning, stacklevel=2)
    stream = BytesIO(data)
    return load(stream, fmt=None, use_builtin_types=False)
154,747 | import binascii
import codecs
import contextlib
import datetime
import enum
from io import BytesIO
import itertools
import os
import re
import struct
from warnings import warn
from xml.parsers.expat import ParserCreate
def dump(value, fp, *, fmt=FMT_XML, sort_keys=True, skipkeys=False):
"""Write 'value' to a .plist file. 'fp' should be a writable,
binary file object.
"""
if fmt not in _FORMATS:
raise ValueError("Unsupported format: %r"%(fmt,))
writer = _FORMATS[fmt]["writer"](fp, sort_keys=sort_keys, skipkeys=skipkeys)
writer.write(value)
The provided code snippet includes necessary dependencies for implementing the `writePlistToBytes` function. Write a Python function `def writePlistToBytes(value)` to solve the following problem:
Return 'value' as a plist-formatted bytes object. This function is deprecated, use dumps instead.
Here is the function:
def writePlistToBytes(value):
    """Serialize *value* to XML-plist bytes and return them.

    Deprecated: use dumps() instead.
    """
    warn("The writePlistToBytes function is deprecated, use dumps() instead",
         DeprecationWarning, stacklevel=2)
    buf = BytesIO()
    dump(value, buf, fmt=FMT_XML, sort_keys=True, skipkeys=False)
    return buf.getvalue()
154,750 | import binascii
import codecs
import contextlib
import datetime
import enum
from io import BytesIO
import itertools
import os
import re
import struct
from warnings import warn
from xml.parsers.expat import ParserCreate
_dateParser = re.compile(r"(?P<year>\d\d\d\d)(?:-(?P<month>\d\d)(?:-(?P<day>\d\d)(?:T(?P<hour>\d\d)(?::(?P<minute>\d\d)(?::(?P<second>\d\d))?)?)?)?)?Z", re.ASCII)
def _date_from_string(s):
order = ('year', 'month', 'day', 'hour', 'minute', 'second')
gd = _dateParser.match(s).groupdict()
lst = []
for key in order:
val = gd[key]
if val is None:
break
lst.append(int(val))
return datetime.datetime(*lst) | null |
154,754 | import binascii
import codecs
import contextlib
import datetime
import enum
from io import BytesIO
import itertools
import os
import re
import struct
from warnings import warn
from xml.parsers.expat import ParserCreate
def _count_to_size(count):
if count < 1 << 8:
return 1
elif count < 1 << 16:
return 2
elif count << 1 << 32:
return 4
else:
return 8 | null |
154,756 | import enum
import io
import logging
import queue
import re
import struct
import threading
import typing
import weakref
from collections import defaultdict, namedtuple
from typing import Any, Iterator, List, Optional, Tuple, Union
from ctypes import Structure,c_byte,c_uint16,c_uint32
from socket import inet_ntoa,htons,inet_ntop,AF_INET6
from retry import retry
from . import bplist
from . import struct2 as ct
from ._proto import LOG, InstrumentsService
from ._safe_socket import PlistSocketProxy
from .exceptions import MuxError, ServiceError
class BinReader(io.BytesIO):
    """BytesIO with unsigned-integer helpers and a strict read method."""

    def read_u32(self) -> int:
        """Read a 4-byte unsigned integer."""
        return struct.unpack("I", self.read(4))[0]

    def read_u64(self) -> int:
        """Read an 8-byte unsigned integer."""
        return struct.unpack("Q", self.read(8))[0]

    def read_exactly(self, n: int) -> bytes:
        """Read exactly *n* bytes; raise MuxError on a short read."""
        chunk = self.read(n)
        if len(chunk) != n:
            raise MuxError("read expect length: 0x%X, got: 0x%x" %
                           (n, len(chunk)))
        return chunk
class MuxError(BaseError):
    """ Mutex error
    NOTE(review): given how this is raised (short reads / unknown type
    tags in the usbmux/DTX parser), the name likely means "usbmux error";
    confirm against the rest of the package.
    """
    pass
The provided code snippet includes necessary dependencies for implementing the `unpack_aux_message` function. Write a Python function `def unpack_aux_message(data: bytes) -> list` to solve the following problem:
Parse aux message array Returns: list of data
Here is the function:
def unpack_aux_message(data: bytes) -> list:
    """ Parse aux message array
    Returns:
        list of data
    Raises:
        MuxError: when the payload is shorter than its 16-byte header or
            contains an unknown type tag
    """
    if len(data) < 16:
        raise MuxError("aux data is too small to unpack")
    args = []
    br = BinReader(data)
    br.read(16) # ignore first 16 magic bytes
    while True:
        # Each entry starts with a 4-byte type tag; EOF ends the loop.
        typedata = br.read(4)
        if not typedata:
            break
        _type = struct.unpack("I", typedata)[0]
        if _type == 2:
            # Type 2: u32 length followed by an NSKeyedArchiver blob.
            _len = br.read_u32()
            archived_data = br.read(_len)
            val = bplist.objc_decode(archived_data)
            args.append(val)
        elif _type in [3, 5]:
            # Types 3 and 5: 32-bit unsigned integer.
            val = br.read_u32()
            args.append(val)
        elif _type in [4, 6]:
            # Types 4 and 6: 64-bit unsigned integer.
            val = br.read_u64()
            args.append(val)
        elif _type == 10:
            # Ignore
            pass
        else:
            raise MuxError("Unknown type", hex(_type))
    return args
154,757 | import datetime
import fnmatch
import io
import logging
import os
import pathlib
import platform
import re
import shutil
import struct
import sys
import tempfile
import threading
import time
import typing
import uuid
from typing import Iterator, Optional, Union
import requests
from deprecation import deprecated
from logzero import setup_logger
from PIL import Image
from retry import retry
from . import bplist, plistlib2
from ._crash import CrashManager
from ._imagemounter import ImageMounter, get_developer_image_path
from ._installation import Installation
from ._instruments import (AUXMessageBuffer, DTXMessage, DTXPayload, DTXService, Event,
ServiceInstruments)
from ._ipautil import IPAReader
from ._proto import *
from ._safe_socket import *
from ._sync import Sync
from ._types import DeviceInfo, XCTestResult
from ._usbmux import Usbmux
from ._utils import (ProgressReader, get_app_dir, semver_compare,
set_socket_timeout)
from .datatypes import *
from .exceptions import *
from .session import Session
The provided code snippet includes necessary dependencies for implementing the `pil_imread` function. Write a Python function `def pil_imread(data: Union[str, pathlib.Path, bytes, bytearray]) -> Image.Image` to solve the following problem:
Convert data(path, binary) to PIL.Image.Image Raises: TypeError
Here is the function:
def pil_imread(data: Union[str, pathlib.Path, bytes, bytearray]) -> Image.Image:
    """ Convert data(path, binary) to PIL.Image.Image

    Args:
        data: an image file path (str or pathlib.Path) or raw image bytes

    Raises:
        TypeError: when *data* is none of the supported types
    """
    if isinstance(data, (bytes, bytearray)):
        memory_fd = io.BytesIO(data)
        im = Image.open(memory_fd)
        # Force-decode now so the in-memory buffer can be released.
        im.load()
        del (memory_fd)
        return im
    elif isinstance(data, (str, pathlib.Path)):
        # BUGFIX: the annotation advertises pathlib.Path, but only str was
        # accepted, so Path inputs wrongly raised TypeError.
        return Image.open(data)
    else:
        raise TypeError("Unknown data type", type(data))
154,758 | import struct
from collections import namedtuple
from functools import partial
import typing
U16 = UInt16 = partial(Field, format="H")
U32 = UInt32 = partial(Field, format="I")
class Struct(object):
    """Fixed-layout binary record described by an ordered list of Fields.

    The struct format string is `byteorder` + each field's format code;
    parse() returns a namedtuple named `typename`. Duplicate field names
    are rejected at construction time.
    """

    def __init__(self, typename: str, *fields, byteorder="<"):
        self._fields = [self._convert_field(f) for f in fields]
        self._typename = typename
        self._fmt = byteorder + ''.join([f.format for f in self._fields])
        self._field_names = []
        for f in self._fields:
            if f.name in self._field_names:
                raise ValueError("Struct has duplicated name", f.name)
            self._field_names.append(f.name)
        # cached sum of per-field sizes (size() asks the struct module instead,
        # which also accounts for any alignment implied by the format)
        self._size = sum([f.size for f in self._fields])

    def size(self):
        """Total packed size in bytes of one record."""
        return struct.Struct(self._fmt).size

    def _convert_field(self, fvalue):
        # only pre-built Field instances are accepted
        if isinstance(fvalue, Field):
            return fvalue
        else:
            raise ValueError("Unknown type:", fvalue)

    def parse_stream(self, reader: typing.BinaryIO):
        """Read exactly one record from a binary stream and parse it.

        BUG FIX: previously passed the bound method ``self.size`` (not its
        result) to ``reader.read()``, which raised TypeError on every call.
        """
        return self.parse(reader.read(self.size()))

    def parse(self, buffer: bytes):
        """Unpack `buffer` into a namedtuple of field values."""
        values = struct.unpack(self._fmt, buffer)
        return namedtuple(self._typename, self._field_names)(*values)

    def build(self, *args, **kwargs) -> bytearray:
        """Pack field values (kwargs, or a single positional dict) into bytes.

        Raises:
            ValueError: when a field that has no default is missing.
        """
        if args:
            assert len(args) == 1
            assert isinstance(args[0], dict)
            kwargs = args[0]
        buffer = bytearray()
        for f in self._fields:
            value = kwargs.get(f.name)
            if value is None:
                if f.default is None:
                    raise ValueError("missing required field", f.name, value,
                                     f.default)
                value = f.default
            buffer.extend(struct.pack(f.format, value))
        return buffer
def _example():
    """Usage demo: a little-endian u32 length followed by a u16 magic."""
    Message = Struct("Message", U32("length"), U16("magic", 0x1234))

    parsed = Message.parse(b"\x12\x00\x00\x00\x12\x35")
    assert parsed.length == 0x12
    assert parsed.magic == 0x3512

    encoded = Message.build(length=7)
    assert encoded == b'\x07\x00\x00\x00\x34\x12'
154,759 | import dataclasses
import typing
from dataclasses import dataclass
from ._proto import ConnectionType
def alias_field(name: str) -> dataclasses.Field:
    """Dataclass field whose serialized name differs from the attribute name."""
    meta = {"alias": name}
    return dataclasses.field(metadata=meta)
154,760 | import argparse
import datetime
import logging
import os
import plistlib
import pprint
import re
import select
import socket
import ssl
import string
import struct
import sys
import threading
import time
import traceback
import typing
from collections import defaultdict
import hexdump
from logzero import logger as _logger
# single-element list used as a mutable module-level counter
_package_index = [0]


def next_package_index() -> int:
    """Return the next package index; starts at 1, increments on each call."""
    counter = _package_index
    counter[0] = counter[0] + 1
    return counter[0]
154,761 | import argparse
import datetime
import logging
import os
import plistlib
import pprint
import re
import select
import socket
import ssl
import string
import struct
import sys
import threading
import time
import traceback
import typing
from collections import defaultdict
import hexdump
from logzero import logger as _logger
def remove_from_list(_list: list, value):
    """Remove the first occurrence of value from _list; no-op when absent."""
    if value in _list:
        _list.remove(value)
154,762 | import argparse
import datetime
import logging
import os
import plistlib
import pprint
import re
import select
import socket
import ssl
import string
import struct
import sys
import threading
import time
import traceback
import typing
from collections import defaultdict
import hexdump
from logzero import logger as _logger
def create_socket(addr) -> socket.socket:
    """Create an unconnected stream socket matching the address style.

    Args:
        addr: (host, port) tuple/list for an AF_INET TCP socket, or a
              filesystem path str for an AF_UNIX socket.

    Raises:
        TypeError: for any other addr type. (The old code silently fell off
            the end and returned None, violating the declared return type.)
    """
    if isinstance(addr, (tuple, list)):
        return socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    elif isinstance(addr, str):
        return socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    raise TypeError("Unsupported address type", type(addr))
154,763 | import argparse
import datetime
import logging
import os
import plistlib
import pprint
import re
import select
import socket
import ssl
import string
import struct
import sys
import threading
import time
import traceback
import typing
from collections import defaultdict
import hexdump
from logzero import logger as _logger
The provided code snippet includes necessary dependencies for implementing the `is_ssl_data` function. Write a Python function `def is_ssl_data(data: bytes) -> bool` to solve the following problem:
FIXME(ssx): better to change to EnableSSLSession
Here is the function:
def is_ssl_data(data: bytes) -> bool:
    """ FIXME(ssx): better to change to EnableSSLSession """
    # TLS record sniff: ContentType 0x16 (handshake) + version 3.1, with a
    # HandshakeType of 0x01 (ClientHello) at offset 5.
    if len(data) < 8:
        return False
    return data[:3] == b'\x16\x03\x01' and data[5:6] == b'\x01'
154,764 | import argparse
import datetime
import logging
import os
import plistlib
import pprint
import re
import select
import socket
import ssl
import string
import struct
import sys
import threading
import time
import traceback
import typing
from collections import defaultdict
import hexdump
from logzero import logger as _logger
def recvall(sock: socket.socket, n: int) -> bytearray:
    """Read exactly n bytes from sock; raise ValueError on premature EOF."""
    received = bytearray()
    while len(received) < n:
        part = sock.recv(n - len(received))
        if not part:
            raise ValueError("socket not recv all bytes")
        received.extend(part)
    return received
154,765 | import argparse
import datetime
import logging
import os
import plistlib
import pprint
import re
import select
import socket
import ssl
import string
import struct
import sys
import threading
import time
import traceback
import typing
from collections import defaultdict
import hexdump
from logzero import logger as _logger
def _parse_addr(addr: str):
if ':' in addr:
host, port = addr.split(":", 1)
return (host, int(port))
return addr | null |
154,766 | from asyncio import gather, run
from pathlib import Path
from std2.asyncio.subprocess import call
_PARENT = Path(__file__).resolve(strict=True).parent
_TOP_LEVEL = _PARENT.parent
async def _build(path: str) -> None:
    """Build the Docker image under <this dir>/<path>/Dockerfile via buildx.

    The image tag is "coq_<path>" with leading underscores stripped from the
    directory name; the build context is the repository top level. Output
    streams straight to the console (capture_* disabled).
    """
    tag = f"coq_{path.lstrip('_')}"
    await call(
        "docker",
        "buildx",
        "build",
        "--progress",
        "plain",
        "--file",
        _PARENT / path / "Dockerfile",
        "--tag",
        tag,
        "--",
        _TOP_LEVEL,
        capture_stdout=False,
        capture_stderr=False,
    )
154,767 | from datetime import datetime, timezone
from os import environ, sep
from pathlib import Path
from shutil import rmtree
from subprocess import check_call, check_output, run
from sys import executable
from typing import Iterator
def _git_identity() -> None:
    """Configure a throwaway global git identity for CI commits."""
    for key, value in (("user.email", "ci@ci.ci"), ("user.name", "ci-bot")):
        check_call(("git", "config", "--global", key, value))
154,768 | from datetime import datetime, timezone
from os import environ, sep
from pathlib import Path
from shutil import rmtree
from subprocess import check_call, check_output, run
from sys import executable
from typing import Iterator
def _git_clone(path: Path, repo_name: str) -> None:
    """Fresh-clone ms-jpq/<repo_name> into path, replacing any existing dir.

    Requires the CI_TOKEN environment variable (a token embedded in the
    clone URL); raises KeyError when it is unset.
    """
    if path.is_dir():
        rmtree(path)
    token = environ["CI_TOKEN"]
    uri = f"https://ms-jpq:{token}@github.com/ms-jpq/{repo_name}.git"
    check_call(("git", "clone", uri, str(path)))
154,769 | from datetime import datetime, timezone
from os import environ, sep
from pathlib import Path
from shutil import rmtree
from subprocess import check_call, check_output, run
from sys import executable
from typing import Iterator
def _build(cwd: Path) -> None:
    """Run the coq.ci module inside cwd using the current interpreter."""
    check_call((executable, "-m", "coq.ci"), cwd=cwd)
154,770 | from datetime import datetime, timezone
from os import environ, sep
from pathlib import Path
from shutil import rmtree
from subprocess import check_call, check_output, run
from sys import executable
from typing import Iterator
def _git_alert(cwd: Path) -> None:
    """Push uncommitted changes in cwd to a fresh timestamped `ci--*` branch.

    First deletes every existing remote branch whose name starts with "ci";
    then, if the working tree differs from HEAD, commits everything on a new
    `ci--<UTC timestamp>` branch and pushes it upstream.
    """
    prefix = "ci"
    check_call(("git", "fetch"), cwd=cwd)
    remote_brs = check_output(("git", "branch", "--remotes"), text=True, cwd=cwd)
    def cont() -> Iterator[str]:
        # remote names look like "origin/<name>"; the "->" line is the
        # symbolic HEAD pointer and is skipped
        for br in remote_brs.splitlines():
            b = br.strip()
            if b and "->" not in b:
                # NOTE(review): partitions on os.sep — correct where sep is
                # "/", but git always uses "/"; verify behavior on Windows.
                _, _, name = b.partition(sep)
                if name.startswith(prefix):
                    yield name
    refs = tuple(cont())
    if refs:
        # prune stale CI branches before pushing a new one
        check_call(("git", "push", "--delete", "origin", *refs), cwd=cwd)
    # `git diff --exit-code` exits non-zero iff the tree has changes
    proc = run(("git", "diff", "--exit-code"), cwd=cwd)
    if proc.returncode:
        time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%d_%H-%M-%S")
        brname = f"{prefix}--{time}"
        check_call(("git", "checkout", "-b", brname), cwd=cwd)
        check_call(("git", "add", "."), cwd=cwd)
        check_call(("git", "commit", "-m", f"update_artifacts: {time}"), cwd=cwd)
        check_call(("git", "push", "--set-upstream", "origin", brname), cwd=cwd)
154,771 | from dataclasses import asdict
from os.path import normcase
from pathlib import Path
from typing import AbstractSet, Callable, Iterable, Iterator, MutableMapping, MutableSet
from uuid import UUID, uuid3
from pynvim_pp.logging import log
from std2.graphlib import recur_sort
from std2.pathlib import walk
from ...shared.types import UTF8, SnippetGrammar
from ..types import LoadedSnips, LoadError, ParsedSnippet
from .lsp import load_lsp
from .neosnippet import load_neosnippet
from .ultisnip import load_ultisnip
def _load_paths(search: Iterable[Path], exts: AbstractSet[str]) -> Iterator[Path]:
    """Yield files under the search roots whose suffix is in exts, normcased."""
    for root in search:
        for candidate in walk(root):
            if candidate.suffix in exts:
                yield Path(normcase(candidate))
def load_direct(
    trans: Callable[[ParsedSnippet], ParsedSnippet],
    ignore_error: bool,
    lsp: Iterable[Path],
    neosnippet: Iterable[Path],
    ultisnip: Iterable[Path],
    lsp_grammar: SnippetGrammar = SnippetGrammar.lsp,
    neosnippet_grammar: SnippetGrammar = SnippetGrammar.snu,
    ultisnip_grammar: SnippetGrammar = SnippetGrammar.snu,
) -> LoadedSnips:
    """Parse snippet files of all three formats and merge them into one
    LoadedSnips (filetype -> extends set, snippet uid -> snippet).

    With ignore_error=True, malformed files are logged and skipped;
    otherwise LoadError propagates.
    """
    sources = {
        load_lsp: (lsp_grammar, lsp),
        load_neosnippet: (neosnippet_grammar, neosnippet),
        load_ultisnip: (ultisnip_grammar, ultisnip),
    }

    exts_by_ft: MutableMapping[str, MutableSet[str]] = {}
    snips_by_uid: MutableMapping[UUID, ParsedSnippet] = {}

    for loader_fn, (grammar, file_paths) in sources.items():
        for file_path in file_paths:
            with file_path.open(encoding=UTF8) as fd:
                try:
                    filetype, exts, snips = loader_fn(
                        grammar, path=file_path, lines=enumerate(fd, start=1)
                    )
                except LoadError as e:
                    if not ignore_error:
                        raise
                    log.warn("%s", e)
                else:
                    acc = exts_by_ft.setdefault(filetype, set())
                    acc.update(exts)
                    for parsed_snip in map(trans, snips):
                        snips_by_uid[_key(parsed_snip)] = parsed_snip

    return LoadedSnips(exts=exts_by_ft, snippets=snips_by_uid)
class ParsedSnippet:
grammar: SnippetGrammar
filetype: str
content: str
label: str
doc: str
matches: AbstractSet[str]
class LoadedSnips:
exts: Mapping[str, AbstractSet[str]]
snippets: Mapping[UUID, ParsedSnippet]
def load_ci(
    trans: Callable[[ParsedSnippet], ParsedSnippet],
    lsp: Iterable[Path],
    neosnippet: Iterable[Path],
    ultisnip: Iterable[Path],
) -> LoadedSnips:
    """Discover snippet files under the given roots and load them leniently
    (parse errors are logged, not raised)."""
    lsp_files = _load_paths(lsp, exts={".json"})
    neo_files = _load_paths(neosnippet, exts={".snippets", ".snip"})
    ulti_files = _load_paths(ultisnip, exts={".snippets", ".snip"})
    return load_direct(
        trans,
        True,
        lsp=lsp_files,
        neosnippet=neo_files,
        ultisnip=ulti_files,
    )
154,772 | from contextlib import suppress
from datetime import datetime
from pathlib import PurePath
from posixpath import normcase
from random import choices
from re import RegexFlag, compile
from re import error as RegexError
from string import ascii_letters, digits, hexdigits
from typing import (
AbstractSet,
Callable,
Iterator,
MutableSequence,
Optional,
Pattern,
Sequence,
Tuple,
)
from uuid import uuid4
from std2.functools import identity
from std2.lex import ParseError as StdLexError
from std2.lex import split
from std2.string import removeprefix, removesuffix
from ...shared.parse import lower
from ...shared.types import Context
from .lexer import context_from, next_char, pushback_chars, raise_err, token_parser
from .types import (
EChar,
End,
Index,
IntBegin,
Parsed,
ParseInfo,
ParserCtx,
TokenStream,
Transform,
VarBegin,
)
def _lex(context: ParserCtx, shallow: bool) -> TokenStream:
class Context:
def context_from(snippet: str, context: Context, info: ParseInfo) -> ParserCtx:
def token_parser(context: ParserCtx, stream: TokenStream) -> Parsed:
class ParseInfo:
class Parsed:
def tokenizer(context: Context, info: ParseInfo, snippet: str) -> Parsed:
    """Lex the snippet text and parse the token stream into a Parsed tree."""
    parser_ctx = context_from(snippet, context=context, info=info)
    token_stream = _lex(parser_ctx, shallow=False)
    return token_parser(parser_ctx, stream=token_stream)
154,773 | from string import ascii_letters, ascii_lowercase, digits
from typing import AbstractSet, MutableSequence, Optional
from ...shared.types import Context
from .lexer import context_from, next_char, pushback_chars, raise_err, token_parser
from .types import (
End,
IntBegin,
Parsed,
ParseInfo,
ParserCtx,
Token,
TokenStream,
Unparsed,
VarBegin,
)
def _lex(context: ParserCtx, shallow: bool) -> TokenStream:
class Context:
def context_from(snippet: str, context: Context, info: ParseInfo) -> ParserCtx:
def token_parser(context: ParserCtx, stream: TokenStream) -> Parsed:
class ParseInfo:
class Parsed:
def tokenizer(context: Context, info: ParseInfo, snippet: str) -> Parsed:
    """Lex the snippet text and parse the token stream into a Parsed tree."""
    parser_ctx = context_from(snippet, context=context, info=info)
    token_stream = _lex(parser_ctx, shallow=False)
    return token_parser(parser_ctx, stream=token_stream)
154,774 | from asyncio import Lock
from contextlib import contextmanager, nullcontext
from time import process_time
from types import TracebackType
from typing import (
Any,
AsyncContextManager,
Iterator,
MutableMapping,
Optional,
Tuple,
Type,
)
from pynvim_pp.logging import log
from std2.locale import si_prefixed_smol
from std2.timeit import timeit as _timeit
from ..consts import DEBUG
def cpu_timeit() -> Iterator[None]:
    """Generator measuring CPU utilisation (process time / wall time) of the
    enclosed block, logged as "CPU :: <ratio>".

    NOTE(review): this is a bare generator used context-manager style —
    presumably decorated with @contextmanager at the real definition site;
    confirm.
    """
    t1 = process_time()
    with _timeit() as t:
        yield None
    t2 = process_time()
    # wall-clock duration reported by the std2 timeit context
    delta = t().total_seconds()
    # fraction of wall time this process spent on-CPU
    cpu = (t2 - t1) / delta
    msg = f"CPU :: {cpu}"
    log.info("%s", msg)
154,775 | from functools import lru_cache
from os.path import normcase
from pathlib import Path
from sqlite3.dbapi2 import Connection
from typing import Protocol, cast
from pynvim_pp.lib import decode
from std2.pathlib import AnyPath
from std2.sqlite3 import add_functions, escape
from .fuzzy import quick_ratio
class _Loader(Protocol):
def __call__(self, *paths: AnyPath) -> str: ...
def loader(base: Path) -> _Loader:
    """Return a memoized reader for SQL files stored under base (.sql suffix)."""

    @lru_cache(maxsize=None)
    def read_sql(*paths: AnyPath) -> str:
        target = (base / Path(*paths)).with_suffix(".sql")
        return decode(target.read_bytes())

    return cast(_Loader, read_sql)
154,776 | from functools import lru_cache
from os.path import normcase
from pathlib import Path
from sqlite3.dbapi2 import Connection
from typing import Protocol, cast
from pynvim_pp.lib import decode
from std2.pathlib import AnyPath
from std2.sqlite3 import add_functions, escape
from .fuzzy import quick_ratio
def like_esc(like: str) -> str:
    """Escape a SQL LIKE pattern (escape char '!') and append the '%' wildcard."""
    body = escape(nono={"%", "_", "["}, escape="!", param=like)
    return body + "%"
154,777 | from itertools import islice
from random import choice
from typing import AbstractSet, Iterator, MutableSequence, Optional, Sequence
from pynvim_pp.text_object import is_word
def coalesce(
    unifying_chars: AbstractSet[str],
    include_syms: bool,
    backwards: Optional[bool],
    chars: Sequence[str],
) -> Iterator[str]:
    """Group a character stream into word and symbol tokens.

    Word characters (per is_word / unifying_chars) accumulate into word
    tokens; non-space non-word characters accumulate into symbol tokens when
    include_syms is set. Whitespace flushes nothing by itself; switching
    between word/symbol runs flushes the other buffer. backwards=None picks
    a random direction; True iterates (and joins) the chars reversed.
    """
    backwards = choice((True, False)) if backwards is None else backwards
    # pending characters of the current word / symbol run
    words: MutableSequence[str] = []
    syms: MutableSequence[str] = []
    def w_it() -> Iterator[str]:
        # flush the word buffer (joined in input order) if non-empty
        if words:
            word = "".join(reversed(words) if backwards else words)
            words.clear()
            yield word
    def s_it() -> Iterator[str]:
        # flush the symbol buffer (joined in input order) if non-empty
        if syms:
            sym = "".join(reversed(syms) if backwards else syms)
            syms.clear()
            yield sym
    for chr in reversed(chars) if backwards else chars:
        if is_word(unifying_chars, chr=chr):
            words.append(chr)
            yield from s_it()
        elif not chr.isspace():
            if include_syms:
                syms.append(chr)
                yield from w_it()
            else:
                yield from w_it()
                yield from s_it()
    # flush whatever is left at end of input
    yield from w_it()
    yield from s_it()
def tokenize(
    tokenization_limit: int,
    unifying_chars: AbstractSet[str],
    include_syms: bool,
    text: str,
) -> Iterator[str]:
    """Split text into word/symbol tokens, capped at tokenization_limit."""
    stream = coalesce(
        unifying_chars, include_syms=include_syms, backwards=None, chars=text
    )
    return islice(stream, tokenization_limit)
154,778 | from asyncio import gather
from dataclasses import dataclass
from functools import lru_cache
from os import environ
from pathlib import Path
from typing import Iterator, Mapping, Optional, Sequence, Tuple
from pynvim_pp.lib import decode
from std2.asyncio.subprocess import call
from ..shared.executor import very_nice
class Pane:
    """One tmux pane row from `tmux list-panes` (field order matches the
    format string built in _panes).

    NOTE(review): bare annotations — presumably @dataclass at the real
    definition site (dataclass is imported in this module); confirm.
    """
    session: str        # "#{session_id}"
    uid: str            # "#{pane_id}"
    session_name: str   # "#{session_name}"
    window_index: int   # "#{window_index}"
    window_name: str    # "#{window_name}"
    pane_index: int     # "#{pane_index}"
    pane_title: str     # "#{pane_title}"
async def _panes(tmux: Path, all_sessions: bool) -> Sequence[Pane]:
    """List tmux panes (all sessions, or just the current server session).

    Best-effort: returns () when tmux cannot be spawned (OSError) or exits
    non-zero. Fields are joined with _SEP in the -F format string and split
    back apart per line.
    NOTE(review): _SEP is defined elsewhere in this module — presumably a
    separator unlikely to appear in pane titles; confirm.
    """
    prefix = await very_nice()
    try:
        proc = await call(
            *prefix,
            tmux,
            "list-panes",
            ("-a" if all_sessions else "-s"),
            "-F",
            _SEP.join(
                (
                    "#{session_id}",
                    "#{pane_id}",
                    "#{session_name}",
                    "#{window_index}",
                    "#{window_name}",
                    "#{pane_index}",
                    "#{pane_title}",
                )
            ),
            check_returncode=set(),
        )
    except OSError:
        return ()
    else:
        if proc.returncode:
            return ()
        else:
            def cont() -> Iterator[Pane]:
                # one output line per pane, fields in the -F order above
                for line in decode(proc.stdout).strip().splitlines():
                    (
                        session,
                        pane_id,
                        session_name,
                        window_index,
                        window_name,
                        pane_index,
                        pane_title,
                    ) = line.split(_SEP)
                    pane = Pane(
                        session=session,
                        uid=pane_id,
                        session_name=session_name,
                        window_index=int(window_index),
                        window_name=window_name,
                        pane_index=int(pane_index),
                        pane_title=pane_title,
                    )
                    yield pane
            return tuple(cont())
async def _screenshot(tmux: Path, pane: Pane) -> Tuple[Pane, str]:
    """Capture the pane's visible text; best-effort — "" on any failure."""
    prefix = await very_nice()
    try:
        proc = await call(
            *prefix,
            tmux,
            "capture-pane",
            "-p",
            "-J",
            "-t",
            pane.uid,
            check_returncode=set(),
        )
    except OSError:
        return pane, ""
    if proc.returncode:
        return pane, ""
    return pane, decode(proc.stdout)
def pane_id() -> Optional[str]:
    """The $TMUX_PANE id of the pane hosting this process, if any."""
    return environ["TMUX_PANE"] if "TMUX_PANE" in environ else None
async def snapshot(
    tmux: Path, all_sessions: bool
) -> Tuple[Optional[Pane], Mapping[Pane, str]]:
    """Screenshot every pane concurrently; also identify the current pane."""
    panes = await _panes(tmux, all_sessions=all_sessions)
    shots = await gather(*(_screenshot(tmux, pane=pane) for pane in panes))
    this_id = pane_id()
    current = None
    for pane in panes:
        if pane.uid == this_id:
            current = pane
            break
    return current, dict(shots)
154,779 | from json import loads
from json.decoder import JSONDecodeError
from pathlib import Path
from typing import Iterator, Mapping, MutableMapping, MutableSequence, Tuple
from pynvim_pp.lib import decode
from pynvim_pp.logging import log
from std2.asyncio.subprocess import call
from std2.string import removeprefix, removesuffix
from ..shared.executor import very_nice
from .types import Tag, Tags
# Value for ctags --fields=..., rendered as "{language}{input}{line}..."
# (the long-form field-name syntax understood by universal-ctags).
_FIELDS = "".join(
    f"{{{f}}}"
    for f in (
        "language",
        "input",
        "line",
        "kind",
        "name",
        "pattern",
        "typeref",
        "scope",
        "scopeKind",
        "access",
        "signature",
    )
)
async def very_nice() -> Sequence[str]:
f: Future = _very_nice()
return await wrap_future(f)
async def run(ctags: Path, *args: str) -> str:
    """Run universal-ctags in JSON output mode over *args*, returning stdout.

    Best-effort: returns "" when args is empty, when the binary is missing
    or not executable, or (since check_returncode is disabled) leaves any
    non-zero exit to the caller via empty/partial output.
    """
    if not args:
        return ""
    else:
        # lowered-priority command prefix from shared.executor
        prefix = await very_nice()
        try:
            proc = await call(
                *prefix,
                ctags,
                "--sort=no",
                "--output-format=json",
                f"--fields={_FIELDS}",
                *args,
                check_returncode=set(),
            )
        except (FileNotFoundError, PermissionError):
            return ""
        else:
            return decode(proc.stdout)
154,780 | from json import loads
from json.decoder import JSONDecodeError
from pathlib import Path
from typing import Iterator, Mapping, MutableMapping, MutableSequence, Tuple
from pynvim_pp.lib import decode
from pynvim_pp.logging import log
from std2.asyncio.subprocess import call
from std2.string import removeprefix, removesuffix
from ..shared.executor import very_nice
from .types import Tag, Tags
def _unescape(pattern: str) -> str:
    """Undo ctags /.../-style pattern quoting.

    Drops the surrounding delimiters and ^...$ anchors, and unescapes
    "\\/" and "\\\\" sequences. (Other escapes keep the backslash and the
    escaped character is consumed, matching the original behavior.)
    """
    stripped = removesuffix(removeprefix(pattern[1:-1], "^"), "$").strip()

    out = []
    chars = iter(stripped)
    for ch in chars:
        if ch != "\\":
            out.append(ch)
        else:
            follower = next(chars, "")
            out.append(follower if follower in {"/", "\\"} else ch)
    return "".join(out)
class Tag(TypedDict):
    """One ctags JSON tag record (the fields requested via _FIELDS)."""
    language: str
    path: str
    line: int
    name: str
    pattern: Optional[str]
    kind: str
    typeref: Optional[str]
    scope: Optional[str]
    scopeKind: Optional[str]
    access: Optional[str]


# path -> (language, mtime, tags found in that file)
Tags = Mapping[str, Tuple[str, float, Sequence[Tag]]]
def parse(mtimes: Mapping[str, float], raw: str) -> Tags:
    """Parse ctags JSON-lines output into {path: (language, mtime, [Tag])}.

    Malformed lines are logged and skipped; mtimes supplies each path's
    modification time (0 when unknown). Only records of _type == "tag"
    are kept.
    """
    tags: MutableMapping[str, Tuple[str, float, MutableSequence[Tag]]] = {}
    for line in raw.splitlines():
        if line:
            try:
                json = loads(line)
            except JSONDecodeError:
                log.warning("%s", line)
            else:
                if json["_type"] == "tag":
                    path = json["path"]
                    # normalize the ctags search pattern into plain text
                    if pattern := json.get("pattern"):
                        new_pattern = _unescape(pattern)
                    else:
                        new_pattern = None
                    json["pattern"] = new_pattern
                    _, _, acc = tags.setdefault(
                        path, (json["language"], mtimes.get(path, 0), [])
                    )
                    acc.append(json)
    return tags
154,781 | from contextlib import closing, suppress
from dataclasses import dataclass
from sqlite3 import Connection, OperationalError
from typing import Iterator, Mapping
from ...consts import INSERT_DB
from ...shared.sql import init_db
from ..types import DB
from .sql import sql
INSERT_DB = normpath(TMP_DIR / "inserts.sqlite3") if DEBUG_DB else ":memory:"
def init_db(conn: Connection) -> None:
sql = loader(Path(__file__).resolve(strict=True).parent)
def _init() -> Connection:
    """Open the inserts DB in autocommit mode, register helper functions and
    create the schema (pragmas first, then tables)."""
    conn = Connection(INSERT_DB, isolation_level=None)
    init_db(conn)
    conn.executescript(sql("create", "pragma"))
    conn.executescript(sql("create", "tables"))
    return conn
154,782 | from typing import Any, Awaitable, Callable
from pynvim_pp.atomic import Atomic
from pynvim_pp.autocmd import AutoCMD
from pynvim_pp.handler import RPC
def _name_gen(fn: Callable[..., Awaitable[Any]]) -> str:
return fn.__qualname__.lstrip("_").capitalize() | null |
154,783 | from argparse import ArgumentParser, Namespace
from asyncio import run as arun
from concurrent.futures import ThreadPoolExecutor
from contextlib import redirect_stderr, redirect_stdout, suppress
from io import StringIO
from os import linesep
from pathlib import Path, PurePath
from subprocess import DEVNULL, STDOUT, CalledProcessError, run
from sys import (
executable,
exit,
getswitchinterval,
setswitchinterval,
stderr,
version_info,
)
from textwrap import dedent
from typing import Any, Union
from .consts import GIL_SWITCH, IS_WIN, REQUIREMENTS, RT_DIR, RT_PY, TOP_LEVEL, VARS
def _socket(arg: str) -> Any:
if arg.startswith("localhost:"):
host, _, port = arg.rpartition(":")
return host, int(port)
else:
return PurePath(arg)
def parse_args() -> Namespace:
    """Parse the CLI: `run --ppid N --socket ADDR [--xdg DIR]` or
    `deps [--xdg DIR]`; a sub-command is required.

    FIX: the original wrapped each sub-parser in `nullcontext(...)`, a name
    not imported in this module; a plain assignment is behaviorally
    identical and removes the unresolved reference.
    """
    parser = ArgumentParser()
    sub_parsers = parser.add_subparsers(dest="command", required=True)

    run_p = sub_parsers.add_parser("run")
    run_p.add_argument("--ppid", required=True, type=int)
    run_p.add_argument("--socket", required=True, type=_socket)
    run_p.add_argument("--xdg")

    deps_p = sub_parsers.add_parser("deps")
    deps_p.add_argument("--xdg")

    return parser.parse_args()
154,784 | from asyncio import (
AbstractEventLoop,
Condition,
get_running_loop,
run_coroutine_threadsafe,
sleep,
wrap_future,
)
from dataclasses import dataclass, replace
from functools import lru_cache
from itertools import count
from threading import Lock
from typing import (
AbstractSet,
Any,
AsyncIterator,
Iterator,
Mapping,
MutableMapping,
MutableSequence,
Optional,
Sequence,
Tuple,
)
from pynvim_pp.logging import log
from pynvim_pp.nvim import Nvim
from pynvim_pp.types import NoneType
from std2.pickle.decoder import new_decoder
from ...registry import NAMESPACE, rpc
from ...server.rt_types import Stack
from ...shared.timeit import timeit
from ...shared.types import UTF8, UTF16, UTF32, Encoding
class _Client:
name: Optional[str]
offset_encoding: Encoding
message: Any
class _Session:
uid: int
done: bool
acc: MutableSequence[Tuple[_Client, Optional[int]]]
class _Payload:
multipart: Optional[int]
name: str
method: Optional[str]
uid: int
offset_encoding: Optional[str]
client: Optional[str]
done: bool
reply: Any
_DECODER = new_decoder[_Payload](_Payload)
_ENCODING_MAP: Mapping[Optional[str], Encoding] = {
"utf8": UTF8,
"utf16": UTF16,
"utf32": UTF32,
}
_LOCK = Lock()
_STATE: MutableMapping[str, _Session] = {}
def _conds(_: str) -> Tuple[AbstractEventLoop, Condition]:
    """(running loop, Condition) pair keyed by session name.

    NOTE(review): the name argument is unused except as a key — presumably
    this is decorated with @lru_cache at the real definition site so
    repeated calls share one Condition per name; as written here each call
    returns a fresh Condition. Confirm.
    """
    loop = get_running_loop()
    return (loop, Condition())
class Stack:
settings: Settings
lru: MutableMapping[UUID, Completion]
metrics: MutableMapping[UUID, Metric]
idb: IDB
supervisor: Supervisor
workers: AbstractSet[Worker]
UTF16: Literal["UTF-16-LE"] = "UTF-16-LE"
async def _lsp_notify(stack: Stack, rpayload: _Payload) -> None:
    """Lua->Python callback: record one client's LSP reply in _STATE.

    Decodes the raw payload, merges it into the session for payload.name
    (only when not stale, i.e. payload.uid >= current session uid) and
    wakes any async_request waiting on that name's Condition. When invoked
    from a different loop/thread, hops onto the loop the Condition was
    created on before touching it.
    """
    payload = _DECODER(rpayload)
    origin, cond = _conds(payload.name)
    async def cont() -> None:
        with _LOCK:
            state = _STATE.get(payload.name)
        if not state or payload.uid >= state.uid:
            encoding = (payload.offset_encoding or "").casefold().replace("-", "")
            # unknown/missing encodings fall back to UTF-16
            offset_encoding = _ENCODING_MAP.get(encoding, UTF16)
            client = _Client(
                name=payload.client,
                offset_encoding=offset_encoding,
                message=payload.reply,
            )
            # append to the accumulator only when extending the SAME request;
            # a newer uid starts a fresh accumulator
            acc = [
                *(state.acc if state and payload.uid == state.uid else ()),
                (client, payload.multipart),
            ]
            session = _Session(uid=payload.uid, done=payload.done, acc=acc)
            with _LOCK:
                _STATE[payload.name] = session
            async with cond:
                cond.notify_all()
    if get_running_loop() == origin:
        await cont()
    else:
        f = run_coroutine_threadsafe(cont(), loop=origin)
        await wrap_future(f)
154,785 | from typing import AbstractSet, AsyncIterator, Optional, cast
from ...shared.types import Context, ExternLSP, ExternLUA
from ..parse import parse
from ..protocol import protocol
from ..types import CompletionResponse, LSPcomp
from .request import async_request
class Context:
"""
|... line ...|
|... line_before 🐭 line_after ...|
|... <syms_before><words_before>🐭<words_after><syms_after> ...|
"""
manual: bool
# CHANGE ID <-> Triggered by NVIM, ie lines changes
change_id: UUID
# COMMIT ID <-> Triggered by COQ
commit_id: UUID
cwd: PurePath
buf_id: int
filetype: str
filename: str
line_count: int
linefeed: Literal["\r\n", "\n", "\r"]
tabstop: int
expandtab: bool
comment: Tuple[str, str]
position: NvimPos
cursor: Cursors
scr_col: int
win_size: int
line: str
line_before: str
line_after: str
lines: Sequence[str]
lines_before: Sequence[str]
lines_after: Sequence[str]
words: str
words_before: str
words_after: str
syms: str
syms_before: str
syms_after: str
ws_before: str
ws_after: str
l_words_before: str
l_words_after: str
l_syms_before: str
l_syms_after: str
is_lower: bool
change: Optional[ChangeEvent]
class ExternLSP:
client: Optional[str]
item: Mapping
command: Optional[Any]
def parse(
    protocol: LSProtocol,
    extern_type: Union[Type[ExternLSP], Type[ExternLUA]],
    always_on_top: Optional[AbstractSet[Optional[str]]],
    client: Optional[str],
    encoding: Encoding,
    short_name: str,
    cursors: Cursors,
    weight_adjust: float,
    resp: CompletionResponse,
) -> LSPcomp:
    """Normalize a raw LSP completion response into an LSPcomp.

    Handles the three response shapes: falsy (None/False/0 — empty result,
    cacheable), a CompletionList mapping ({isIncomplete, items, itemDefaults}),
    or a bare item sequence. Items that parse_item rejects are filtered out
    lazily via the generator. local_cache is True only when the response is
    complete.
    """
    if _falsy(resp):
        return LSPcomp(client=client, local_cache=True, items=iter(()))
    elif isinstance(resp, Mapping):
        # a CompletionList: complete iff isIncomplete is falsy
        is_complete = _falsy(resp.get("isIncomplete"))
        if not isinstance((items := resp.get("items")), Sequence):
            log.warn("%s", f"Unknown LSP resp -- {type(resp)}")
            return LSPcomp(client=client, local_cache=is_complete, items=iter(()))
        else:
            # itemDefaults are folded into each item before parsing
            defaults = _defaults_parser(resp.get("itemDefaults")) or ItemDefaults()
            comps = (
                co1
                for item in items
                if (
                    co1 := parse_item(
                        protocol,
                        extern_type=extern_type,
                        client=client,
                        encoding=encoding,
                        always_on_top=always_on_top,
                        short_name=short_name,
                        cursors=cursors,
                        weight_adjust=weight_adjust,
                        item=_with_defaults(defaults, item=item),
                    )
                )
            )
            return LSPcomp(client=client, local_cache=is_complete, items=comps)
    elif isinstance(resp, Sequence):
        # bare CompletionItem[] — treated as a complete response
        defaults = ItemDefaults()
        comps = (
            co2
            for item in resp
            if (
                co2 := parse_item(
                    protocol,
                    extern_type=extern_type,
                    always_on_top=always_on_top,
                    client=client,
                    encoding=encoding,
                    short_name=short_name,
                    cursors=cursors,
                    weight_adjust=weight_adjust,
                    item=_with_defaults(defaults, item=item),
                )
            )
        )
        return LSPcomp(client=client, local_cache=True, items=comps)
    else:
        log.warn("%s", f"Unknown LSP resp -- {type(resp)}")
        return LSPcomp(client=client, local_cache=False, items=iter(()))
async def protocol() -> LSProtocol:
f: Future = _protocol()
return await wrap_future(f)
CompletionResponse = Union[
Literal[None, False, 0], Sequence[CompletionItem], _CompletionList
]
class LSPcomp:
client: Optional[str]
local_cache: bool
items: Iterator[Completion]
async def async_request(
    name: str, multipart: Optional[int], clients: AbstractSet[str], *args: Any
) -> AsyncIterator[_Client]:
    """Issue the named Lua-side LSP request and stream per-client replies.

    Replies arrive out-of-band (via _lsp_notify) into the shared _STATE
    session for `name`; this coroutine drains that session's accumulator
    under _LOCK and waits on its Condition between deliveries. A session
    with a larger uid means a newer request superseded this one, which ends
    the stream. With `multipart` set, remaining pages are pulled lazily and
    spliced into each client's message.
    """
    with timeit(f"LSP :: {name}"):
        (_, cond), uid = _conds(name), next(_uids(name))
        with _LOCK:
            _STATE[name] = _Session(uid=uid, done=False, acc=[])
        async with cond:
            cond.notify_all()
        await Nvim.api.exec_lua(
            NoneType,
            f"{NAMESPACE}.{name}(...)",
            (name, multipart, uid, tuple(clients), *args),
        )
        while True:
            with _LOCK:
                state = _STATE.get(name)
            if state:
                if state.uid == uid:
                    while state.acc:
                        client, multipart = state.acc.pop()
                        if multipart:
                            # chunked reply: stream the remaining pages
                            async for part in _lsp_pull(
                                multipart, client=client.name, uid=uid
                            ):
                                if isinstance(
                                    client.message, MutableMapping
                                ) and isinstance(client.message.get("items"), Sequence):
                                    message = {**client.message, "items": part}
                                    yield replace(client, message=message)
                                else:
                                    yield replace(client, message=part)
                        else:
                            yield client
                    if state.done:
                        with _LOCK:
                            _STATE.pop(name)
                        break
                elif state.uid > uid:
                    # superseded by a newer request — stop streaming
                    break
                else:
                    log.info(
                        "%s", f"<><> DELAYED LSP RESP <><> :: {name} {state.uid} {uid}"
                    )
            async with cond:
                await cond.wait()
async def comp_lsp(
    short_name: str,
    always_on_top: Optional[AbstractSet[Optional[str]]],
    weight_adjust: float,
    context: Context,
    chunk: int,
    clients: AbstractSet[str],
) -> AsyncIterator[LSPcomp]:
    """Fan an LSP completion request out to `clients` and yield each
    client's reply parsed into an LSPcomp (chunked by `chunk` items)."""
    pc = await protocol()
    async for client in async_request("lsp_comp", chunk, clients, context.cursor):
        resp = cast(CompletionResponse, client.message)
        parsed = parse(
            pc,
            extern_type=ExternLSP,
            client=client.name,
            encoding=client.offset_encoding,
            short_name=short_name,
            cursors=context.cursor,
            always_on_top=always_on_top,
            weight_adjust=weight_adjust,
            resp=resp,
        )
        yield parsed
154,786 | from typing import AbstractSet, AsyncIterator, Optional, cast
from ...shared.types import Context, ExternLSP, ExternLUA
from ..parse import parse
from ..protocol import protocol
from ..types import CompletionResponse, LSPcomp
from .request import async_request
class Context:
"""
|... line ...|
|... line_before 🐭 line_after ...|
|... <syms_before><words_before>🐭<words_after><syms_after> ...|
"""
manual: bool
# CHANGE ID <-> Triggered by NVIM, ie lines changes
change_id: UUID
# COMMIT ID <-> Triggered by COQ
commit_id: UUID
cwd: PurePath
buf_id: int
filetype: str
filename: str
line_count: int
linefeed: Literal["\r\n", "\n", "\r"]
tabstop: int
expandtab: bool
comment: Tuple[str, str]
position: NvimPos
cursor: Cursors
scr_col: int
win_size: int
line: str
line_before: str
line_after: str
lines: Sequence[str]
lines_before: Sequence[str]
lines_after: Sequence[str]
words: str
words_before: str
words_after: str
syms: str
syms_before: str
syms_after: str
ws_before: str
ws_after: str
l_words_before: str
l_words_after: str
l_syms_before: str
l_syms_after: str
is_lower: bool
change: Optional[ChangeEvent]
class ExternLUA(ExternLSP): ...
def parse(
    protocol: LSProtocol,
    extern_type: Union[Type[ExternLSP], Type[ExternLUA]],
    always_on_top: Optional[AbstractSet[Optional[str]]],
    client: Optional[str],
    encoding: Encoding,
    short_name: str,
    cursors: Cursors,
    weight_adjust: float,
    resp: CompletionResponse,
) -> LSPcomp:
    """Normalize a raw LSP completion response into an LSPcomp.

    Handles the three response shapes: falsy (None/False/0 — empty result,
    cacheable), a CompletionList mapping ({isIncomplete, items, itemDefaults}),
    or a bare item sequence. Items that parse_item rejects are filtered out
    lazily via the generator. local_cache is True only when the response is
    complete.
    """
    if _falsy(resp):
        return LSPcomp(client=client, local_cache=True, items=iter(()))
    elif isinstance(resp, Mapping):
        # a CompletionList: complete iff isIncomplete is falsy
        is_complete = _falsy(resp.get("isIncomplete"))
        if not isinstance((items := resp.get("items")), Sequence):
            log.warn("%s", f"Unknown LSP resp -- {type(resp)}")
            return LSPcomp(client=client, local_cache=is_complete, items=iter(()))
        else:
            # itemDefaults are folded into each item before parsing
            defaults = _defaults_parser(resp.get("itemDefaults")) or ItemDefaults()
            comps = (
                co1
                for item in items
                if (
                    co1 := parse_item(
                        protocol,
                        extern_type=extern_type,
                        client=client,
                        encoding=encoding,
                        always_on_top=always_on_top,
                        short_name=short_name,
                        cursors=cursors,
                        weight_adjust=weight_adjust,
                        item=_with_defaults(defaults, item=item),
                    )
                )
            )
            return LSPcomp(client=client, local_cache=is_complete, items=comps)
    elif isinstance(resp, Sequence):
        # bare CompletionItem[] — treated as a complete response
        defaults = ItemDefaults()
        comps = (
            co2
            for item in resp
            if (
                co2 := parse_item(
                    protocol,
                    extern_type=extern_type,
                    always_on_top=always_on_top,
                    client=client,
                    encoding=encoding,
                    short_name=short_name,
                    cursors=cursors,
                    weight_adjust=weight_adjust,
                    item=_with_defaults(defaults, item=item),
                )
            )
        )
        return LSPcomp(client=client, local_cache=True, items=comps)
    else:
        log.warn("%s", f"Unknown LSP resp -- {type(resp)}")
        return LSPcomp(client=client, local_cache=False, items=iter(()))
async def protocol() -> LSProtocol:
f: Future = _protocol()
return await wrap_future(f)
CompletionResponse = Union[
Literal[None, False, 0], Sequence[CompletionItem], _CompletionList
]
class LSPcomp:
client: Optional[str]
local_cache: bool
items: Iterator[Completion]
async def async_request(
    name: str, multipart: Optional[int], clients: AbstractSet[str], *args: Any
) -> AsyncIterator[_Client]:
    """
    Fire a named lua-side LSP request and stream back per-client replies.

    A per-`name` session (`_STATE`) is claimed under `_LOCK`; the lua side
    appends replies to `state.acc` and waiters are woken via the condition.
    Replies belonging to a newer `uid` terminate the stream early.
    """
    with timeit(f"LSP :: {name}"):
        (_, cond), uid = _conds(name), next(_uids(name))
        with _LOCK:
            # Claim the session; any older in-flight request becomes stale.
            _STATE[name] = _Session(uid=uid, done=False, acc=[])
        async with cond:
            cond.notify_all()
        await Nvim.api.exec_lua(
            NoneType,
            f"{NAMESPACE}.{name}(...)",
            (name, multipart, uid, tuple(clients), *args),
        )
        while True:
            with _LOCK:
                state = _STATE.get(name)
            if state:
                if state.uid == uid:
                    # Drain accumulated replies for OUR request.
                    while state.acc:
                        # NB: `multipart` is rebound here to the per-reply
                        # chunking token from the lua side.
                        client, multipart = state.acc.pop()
                        if multipart:
                            # Large replies are pulled chunk by chunk.
                            async for part in _lsp_pull(
                                multipart, client=client.name, uid=uid
                            ):
                                if isinstance(
                                    client.message, MutableMapping
                                ) and isinstance(client.message.get("items"), Sequence):
                                    # CompletionList shape: splice the pulled
                                    # chunk in place of `items`.
                                    message = {**client.message, "items": part}
                                    yield replace(client, message=message)
                                else:
                                    yield replace(client, message=part)
                        else:
                            yield client
                    if state.done:
                        with _LOCK:
                            _STATE.pop(name)
                        break
                elif state.uid > uid:
                    # A newer request superseded us; stop silently.
                    break
                else:
                    log.info(
                        "%s", f"<><> DELAYED LSP RESP <><> :: {name} {state.uid} {uid}"
                    )
            async with cond:
                await cond.wait()
async def comp_thirdparty(
    short_name: str,
    always_on_top: Optional[AbstractSet[Optional[str]]],
    weight_adjust: float,
    context: Context,
    chunk: int,
    clients: AbstractSet[str],
) -> AsyncIterator[LSPcomp]:
    """Stream parsed completions from third-party (lua) sources."""
    proto = await protocol()
    replies = async_request(
        "lsp_third_party", chunk, clients, context.cursor, context.line
    )
    async for reply in replies:
        yield parse(
            proto,
            extern_type=ExternLUA,
            client=reply.name,
            encoding=reply.offset_encoding,
            short_name=reply.name or short_name,
            cursors=context.cursor,
            always_on_top=always_on_top,
            weight_adjust=weight_adjust,
            resp=cast(CompletionResponse, reply.message),
        )
154,787 | from asyncio import AbstractEventLoop, gather, get_running_loop, wrap_future
from asyncio.exceptions import CancelledError
from concurrent.futures import Future, ThreadPoolExecutor
from contextlib import AbstractAsyncContextManager, suppress
from functools import wraps
from logging import DEBUG as DEBUG_LV
from logging import INFO
from string import Template
from sys import exit
from textwrap import dedent
from typing import Any, Sequence, cast
from pynvim_pp.logging import log, suppress_and_log
from pynvim_pp.nvim import Nvim, conn
from pynvim_pp.rpc_types import Method, MsgType, RPCallable, ServerAddr
from pynvim_pp.types import NoneType
from std2.contextlib import nullacontext
from std2.pickle.types import DecodeError
from std2.platform import OS, os
from std2.sys import autodie
from ._registry import ____
from .consts import DEBUG, DEBUG_DB, DEBUG_METRICS, TMP_DIR
from .registry import atomic, autocmd, rpc
from .server.registrants.options import set_options
from .server.rt_types import Stack, ValidationError
from .server.runtime import stack
def _autodie(ppid: int) -> AbstractAsyncContextManager:
    """Tie our lifetime to the parent pid; no-op context on windows."""
    windows = os is OS.windows
    return nullacontext(None) if windows else autodie(ppid)
def _set_debug(loop: AbstractEventLoop) -> None:
    """Propagate the compile-time debug switches to asyncio and logging."""
    loop.set_debug(DEBUG)
    debugging = DEBUG or DEBUG_METRICS or DEBUG_DB
    if debugging:
        TMP_DIR.mkdir(parents=True, exist_ok=True)
    log.setLevel(DEBUG_LV if debugging else INFO)
async def _default(msg: MsgType, method: Method, params: Sequence[Any]) -> None:
    """Fallback RPC handler: loudly log any method nobody registered."""
    if method not in _IGNORE:
        with suppress_and_log():
            # Deliberate: the failing assert becomes a logged traceback
            # (suppress_and_log) without crashing the event loop.
            assert False, (msg, method, params)
def _trans(stack: Stack, handler: _CB) -> _CB:
    """Bind `stack` as the handler's first argument; swallow cancellation."""

    async def wrapped(*params: Any) -> None:
        try:
            return await handler(stack, *params)
        except CancelledError:
            return None

    return cast(_CB, wrapped)
# Module-level registries: handlers across the codebase attach themselves to
# these at import time; `init` later drains and commits them in one batch.
autocmd = AutoCMD()
atomic = Atomic()
rpc = RPC(NAMESPACE, name_gen=_name_gen)
async def set_options(mapping: KeyMapping, fast_close: bool) -> None:
    """
    Translate the user's keymap / display config into nvim options and keys.

    Edits are queued on `settings` / `keymap` and committed atomically in a
    single round-trip at the end.
    """
    settings = Settings()
    keymap = Keymap()
    # Route <c-x><c-u> user completion through coq's lua omnifunc.
    settings["completefunc"] = f"v:lua.{NAMESPACE}.{omnifunc.method}"
    if mapping.eval_snips:
        _ = (
            keymap.n(mapping.eval_snips)
            << f"<cmd>lua {NAMESPACE}.{eval_snips.method}(false)<cr>"
        )
        _ = (
            keymap.v(mapping.eval_snips)
            << rf"<c-\><c-n><cmd>lua {NAMESPACE}.{eval_snips.method}(true)<cr>"
        )
    if mapping.bigger_preview:
        # Only takes effect while the pum is open in `eval` mode.
        _ = (
            keymap.i(mapping.bigger_preview, expr=True)
            << f"(pumvisible() && complete_info(['mode']).mode ==# 'eval') ? {preview_preview.method}() : '{mapping.bigger_preview}'"
        )
    if mapping.jump_to_mark:
        _ = (
            keymap.n(mapping.jump_to_mark)
            << f"<cmd>lua {NAMESPACE}.{nav_mark.method}()<cr>"
        )
        _ = (
            keymap.iv(mapping.jump_to_mark)
            << rf"<c-\><c-n><cmd>lua {NAMESPACE}.{nav_mark.method}()<cr>"
        )
    if mapping.repeat:
        _ = keymap.n(mapping.repeat) << f"<cmd>lua {NAMESPACE}.{repeat.method}()<cr>"
    if mapping.manual_complete:
        # Toggle: reopen the pum when visible, open it when not.
        _ = (
            keymap.i(mapping.manual_complete, expr=True)
            << "pumvisible() ? '<c-e><c-x><c-u>' : '<c-x><c-u>'"
        )
        if not mapping.manual_complete_insertion_only:
            _ = keymap.nv(mapping.manual_complete) << r"<c-\><c-n>i<c-x><c-u>"
    settings["completeopt"] += (
        "noinsert",
        "menuone",
        *(() if mapping.pre_select else ("noselect",)),
    )
    if mapping.recommended:
        # Opinionated defaults: dismiss the pum before keys that would
        # otherwise accept the selection as a side effect.
        _ = keymap.i("<esc>", expr=True) << "pumvisible() ? '<c-e><esc>' : '<esc>'"
        _ = keymap.i("<c-c>", expr=True) << "pumvisible() ? '<c-e><c-c>' : '<c-c>'"
        _ = keymap.i("<bs>", expr=True) << "pumvisible() ? '<c-e><bs>' : '<bs>'"
        _ = keymap.i("<c-w>", expr=True) << "pumvisible() ? '<c-e><c-w>' : '<c-w>'"
        _ = keymap.i("<c-u>", expr=True) << "pumvisible() ? '<c-e><c-u>' : '<c-u>'"
        _ = (
            keymap.i("<cr>", expr=True)
            << "pumvisible() ? (complete_info(['selected']).selected == -1 ? '<c-e><cr>' : '<c-y>') : '<cr>'"
        )
        _ = keymap.i("<tab>", expr=True) << "pumvisible() ? '<c-n>' : '<tab>'"
        _ = keymap.i("<s-tab>", expr=True) << "pumvisible() ? '<c-p>' : '<bs>'"
    if fast_close:
        # Suppress "match x of y" messages for faster pum redraws.
        settings["shortmess"] += "c"
    await (settings.drain() + keymap.drain(buf=None)).commit(NoneType)
class ValidationError(Exception): ...  # settings rejected; caught alongside DecodeError at startup
async def stack(th: ThreadPoolExecutor) -> Stack:
    """Assemble the server runtime: settings, state, DB, reviewer, workers."""
    cfg = await _settings()
    width = await Nvim.opts.get(int, "pumwidth")
    variables = (
        Path(await Nvim.fn.stdpath(str, "cache")) / "coq" if cfg.xdg else VARS
    )
    initial = state(cwd=await Nvim.getcwd(), pum_width=width)
    db = IDB()
    rev = Reviewer(
        icons=cfg.display.icons,
        options=cfg.match,
        db=db,
    )
    sup = Supervisor(
        th=th,
        vars_dir=variables,
        display=cfg.display,
        match=cfg.match,
        comp=cfg.completion,
        limits=cfg.limits,
        reviewer=rev,
    )
    pool = set(
        _from_each_according_to_their_ability(
            cfg,
            vars_dir=variables,
            cwd=initial.cwd,
            supervisor=sup,
        )
    )
    return Stack(
        settings=cfg,
        lru=LRU(size=cfg.match.max_results),
        metrics={},
        idb=db,
        supervisor=sup,
        workers=pool,
    )
async def init(socket: ServerAddr, ppid: int, th: ThreadPoolExecutor) -> None:
    """
    Server entry point: connect to nvim over `socket`, build the runtime
    stack, register all RPC handlers, then serve until the connection dies.

    Exits with status 1 when the user's settings fail to decode/validate.
    """
    loop = get_running_loop()
    loop.set_default_executor(th)
    # Die with the editor: watch the parent pid (no-op on windows).
    async with _autodie(ppid):
        _set_debug(loop)
        die: Future = Future()

        async def cont() -> None:
            async with conn(die, socket=socket, default=_default) as client:
                try:
                    stk = await stack(th=th)
                except (DecodeError, ValidationError) as e:
                    # Bad user config: show a readable message, then bail.
                    tpl = """
                    Some options may have changed.
                    See help doc on Github under [docs/CONFIGURATION.md]
                    ⚠️ ${e}
                    """
                    msg = Template(dedent(tpl)).substitute(e=e)
                    await Nvim.write(msg, error=True)
                    exit(1)
                else:
                    # Wrap each handler so it receives the stack and
                    # tolerates cancellation, then flush registrations.
                    rpc_atomic, handlers = rpc.drain()
                    for handler in handlers.values():
                        hldr = _trans(stk, handler=handler)
                        client.register(hldr)
                    await (rpc_atomic + autocmd.drain() + atomic).commit(NoneType)
                    await set_options(
                        mapping=stk.settings.keymap,
                        fast_close=stk.settings.display.pum.fast_close,
                    )

        await gather(wrap_future(die), cont())
154,788 | from asyncio import (
AbstractEventLoop,
Condition,
get_running_loop,
run_coroutine_threadsafe,
wrap_future,
)
from dataclasses import dataclass
from functools import lru_cache
from itertools import count
from string import capwords
from typing import Generic, Iterable, Iterator, Optional, Sequence, Tuple, TypeVar
from pynvim_pp.lib import recode
from pynvim_pp.nvim import Nvim
from pynvim_pp.types import NoneType
from std2.cell import RefCell
from ..registry import NAMESPACE, rpc
from ..server.rt_types import Stack
from ..shared.timeit import timeit
from .types import Payload, RawPayload, SimplePayload, SimpleRawPayload
class _Payload(Generic[_T]):
class _Session:
_CELL = RefCell(_Session(uid=-1, done=True, payload=_NIL_P))
def _cond() -> Tuple[AbstractEventLoop, Condition]:
class Stack:
class RawPayload(SimpleRawPayload, TypedDict, total=False):
async def _ts_notify(
    stack: Stack,
    session: int,
    buf: int,
    lo: int,
    hi: int,
    filetype: str,
    filename: str,
    reply: Sequence[RawPayload],
    elapsed: float,
) -> None:
    """
    RPC callback: receive a treesitter scrape of buffer lines from lua.

    The state mutation is marshalled onto the loop that owns `_cond()` /
    `_CELL`, since this callback may run on a different loop/thread.
    """
    origin, cond = _cond()

    async def cont() -> None:
        # Only accept replies at least as new as the latest recorded session.
        if session >= _CELL.val.uid:
            payload = _Payload(
                buf=buf,
                lo=lo,
                hi=hi,
                filetype=filetype,
                filename=filename,
                payloads=reply,
                elapsed=elapsed,
            )
            _CELL.val = _Session(uid=session, done=True, payload=payload)
        # Wake any waiter polling `_CELL` in async_request.
        async with cond:
            cond.notify_all()

    f = run_coroutine_threadsafe(cont(), loop=origin)
    await wrap_future(f)
154,789 | from asyncio import (
AbstractEventLoop,
Condition,
get_running_loop,
run_coroutine_threadsafe,
wrap_future,
)
from dataclasses import dataclass
from functools import lru_cache
from itertools import count
from string import capwords
from typing import Generic, Iterable, Iterator, Optional, Sequence, Tuple, TypeVar
from pynvim_pp.lib import recode
from pynvim_pp.nvim import Nvim
from pynvim_pp.types import NoneType
from std2.cell import RefCell
from ..registry import NAMESPACE, rpc
from ..server.rt_types import Stack
from ..shared.timeit import timeit
from .types import Payload, RawPayload, SimplePayload, SimpleRawPayload
class _Payload(Generic[_T]):
    """A treesitter scrape of buffer lines [lo, hi) plus timing info."""

    buf: int
    lo: int
    hi: int
    filetype: str
    filename: str
    # May be a lazy generator; consume once.
    payloads: Iterable[_T]
    elapsed: float
class _Session:
    """Latest treesitter request/response pairing, keyed by `uid`."""

    uid: int
    # True once the lua side has replied.
    done: bool
    payload: _Payload
# Monotonic request ids; replies carrying stale uids are discarded.
_UIDS = count()
# Placeholder payload for sessions that have not completed yet.
_NIL_P = _Payload[RawPayload](
    buf=-1, lo=-1, hi=-1, filetype="", filename="", payloads=(), elapsed=-1
)
# Single mutable slot holding the most recent session.
_CELL = RefCell(_Session(uid=-1, done=True, payload=_NIL_P))
def _cond() -> Tuple[AbstractEventLoop, Condition]:
loop = get_running_loop()
return (loop, Condition())
def _vaildate(r_playload: _Payload[RawPayload]) -> _Payload[Payload]:
    """
    Convert raw (dict) treesitter payloads into typed `Payload`s, silently
    dropping entries that fail to parse.  `payloads` stays lazy.

    NOTE(review): "vaildate" / "playload" are long-standing typos; the names
    are kept because callers refer to them.
    """

    def cont() -> Iterator[Payload]:
        for load in r_playload.payloads:
            if payload := _parse(load):
                range = load.get("range")
                assert range
                # Enclosing scopes are optional; _parse tolerates None.
                parent = _parse(load.get("parent"))
                grandparent = _parse(load.get("grandparent"))
                yield Payload(
                    filename="",
                    range=range,
                    text=payload.text,
                    kind=payload.kind,
                    parent=parent,
                    grandparent=grandparent,
                )

    payload = _Payload(
        buf=r_playload.buf,
        lo=r_playload.lo,
        hi=r_playload.hi,
        filetype=r_playload.filetype,
        filename=r_playload.filename,
        elapsed=r_playload.elapsed,
        payloads=cont(),
    )
    return payload
# Lua global under which all of coq's functions are registered.
NAMESPACE = "COQ"
def timeit(
    name: str, *args: Any, force: bool = False, warn: Optional[float] = None
) -> Iterator[None]:
    """
    Context-manager generator: time the enclosed block and log it.

    Logs when DEBUG is on, when `force` is set, or when the elapsed time
    reaches `warn` seconds; also tracks a running per-`name` average.
    NOTE(review): presumably wrapped with @contextmanager at the original
    definition site -- the decorator is outside this excerpt.
    """
    if DEBUG or force or warn is not None:
        with _timeit() as t:
            yield None
        delta = t().total_seconds()
        if DEBUG or force or delta >= (warn or 0):
            # Running (count, cumulative seconds) per timer name.
            times, cum = _RECORDS.get(name, (0, 0))
            tt, c = times + 1, cum + delta
            _RECORDS[name] = tt, c
            label = name.ljust(50)
            time = f"{si_prefixed_smol(delta, precision=0)}s".ljust(8)
            ttime = f"{si_prefixed_smol(c / tt, precision=0)}s".ljust(8)
            msg = f"TIME -- {label} :: {time} @ {ttime} {' '.join(map(str, args))}"
            if force:
                log.info("%s", msg)
            else:
                log.debug("%s", msg)
    else:
        yield None
class Payload(SimplePayload):
    """A parsed treesitter node plus its enclosing scopes."""

    filename: str
    range: Tuple[int, int]
    parent: Optional[SimplePayload]
    grandparent: Optional[SimplePayload]
async def async_request() -> Optional[_Payload[Payload]]:
    """
    Ask the lua side for a treesitter scrape and await the matching reply.

    Returns None when a newer request has superseded this one.
    """
    _, cond = _cond()
    with timeit("TS"):
        uid = next(_UIDS)
        # Publish a fresh, not-yet-done session; _ts_notify completes it.
        _CELL.val = _Session(uid=uid, done=False, payload=_NIL_P)
        async with cond:
            cond.notify_all()
        await Nvim.api.exec_lua(NoneType, f"{NAMESPACE}.ts_req(...)", (uid,))
        while True:
            session = _CELL.val
            if session.uid == uid and session.done:
                return _vaildate(session.payload)
            elif session.uid > uid:
                # Superseded by a newer request.
                return None
            else:
                async with cond:
                    await cond.wait()
154,790 | from asyncio import Semaphore, gather
from contextlib import suppress
from multiprocessing import cpu_count
from pathlib import Path
from typing import Any, Iterator, MutableMapping, MutableSet, Tuple
from urllib.parse import urlparse
from uuid import UUID
from std2.asyncio.subprocess import call
from std2.graphlib import recur_sort
from std2.pickle.decoder import new_decoder
from std2.pickle.encoder import new_encoder
from yaml import safe_load
from ..consts import COMPILATION_YML, TMP_DIR
from ..shared.context import EMPTY_CONTEXT
from ..shared.settings import EMPTY_COMP, EMPTY_MATCH
from ..shared.types import SnippetEdit
from ..snippets.loaders.load import load_ci as load_from_paths
from ..snippets.parse import parse_basic
from ..snippets.parsers.types import ParseError, ParseInfo
from ..snippets.types import LoadedSnips, ParsedSnippet
from .snip_trans import trans
from .types import Compilation
async def load() -> LoadedSnips:
    """Clone snippet sources, parse them, and merge filetype extensions."""
    TMP_DIR.mkdir(parents=True, exist_ok=True)
    spec = new_decoder[Compilation](Compilation)(
        safe_load(COMPILATION_YML.read_bytes())
    )
    throttle = Semaphore(value=cpu_count())
    await gather(*(_git_pull(throttle, uri=uri) for uri in spec.git))
    parsed = load_from_paths(
        trans,
        lsp=(TMP_DIR / p for p in spec.paths.lsp),
        neosnippet=(TMP_DIR / p for p in spec.paths.neosnippet),
        ultisnip=(TMP_DIR / p for p in spec.paths.ultisnip),
    )
    # Start from the parsed extension map, then fold in user remaps.
    exts: MutableMapping[str, MutableSet[str]] = {
        key: {*values} for key, values in parsed.exts.items()
    }
    for key, extras in spec.remaps.items():
        exts.setdefault(key, set()).update(extras)
    return LoadedSnips(snippets=parsed.snippets, exts=exts)
# Neutral context used for trial-parsing snippets outside any real buffer.
EMPTY_CONTEXT = Context(
    manual=True,
    change_id=uuid4(),
    commit_id=uuid4(),
    cwd=PurePath(),
    buf_id=0,
    filename=normcase(_FILE),
    filetype="",
    line_count=0,
    linefeed="\n",
    tabstop=2,
    expandtab=True,
    comment=("", ""),
    position=(0, 0),
    cursor=(0, 0, 0, 0),
    scr_col=0,
    win_size=0,
    line="",
    line_before="",
    line_after="",
    lines=(),
    lines_before=(),
    lines_after=(),
    words="",
    words_before="",
    words_after="",
    syms="",
    syms_before="",
    syms_after="",
    ws_before="",
    ws_after="",
    l_words_before="",
    l_words_after="",
    l_syms_before="",
    l_syms_after="",
    is_lower=True,
    change=None,
)
# Zeroed matcher options: everything off / unlimited-nothing.
EMPTY_MATCH = MatchOptions(
    unifying_chars=set(),
    max_results=0,
    look_ahead=0,
    exact_matches=0,
    fuzzy_cutoff=0,
)
# Minimal completion options for headless snippet compilation.
EMPTY_COMP = CompleteOptions(
    always=False,
    smart=True,
    replace_prefix_threshold=0,
    replace_suffix_threshold=0,
    skip_after=set(),
)
class SnippetEdit(Edit):
    """An edit whose `new_text` is a snippet in the given grammar."""

    grammar: SnippetGrammar
def parse_basic(
    match: MatchOptions,
    comp: CompleteOptions,
    adjust_indent: bool,
    context: Context,
    snippet: SnippetEdit,
    info: ParseInfo,
) -> Tuple[Edit, Sequence[Mark], TextTransforms]:
    """
    Expand a snippet against `context` into a concrete edit + tab-stop marks.

    Two parser passes: the first on the raw text only to derive the sort key
    and the prefix/suffix to replace; the second on the (optionally
    re-indented) text to produce the final body, cursor and regions.
    """
    parser = _parser(snippet.grammar)
    sort_by = parser(context, info, snippet.new_text).text
    trans_ctx = trans_adjusted(match, comp=comp, ctx=context, new_text=sort_by)
    old_prefix, old_suffix = trans_ctx.old_prefix, trans_ctx.old_suffix
    line_before = removesuffix(context.line_before, suffix=old_prefix)
    indented = (
        SNIP_LINE_SEP.join(
            indent_adjusted(
                context, line_before=line_before, lines=snippet.new_text.splitlines()
            )
        )
        if adjust_indent
        else snippet.new_text
    )
    parsed = parser(context, info, indented)
    # Text up to the parsed cursor becomes the inserted prefix.
    new_prefix = parsed.text[: parsed.cursor]
    new_lines = parsed.text.split(SNIP_LINE_SEP)
    new_text = context.linefeed.join(new_lines)
    edit = ContextualEdit(
        new_text=new_text,
        old_prefix=old_prefix,
        old_suffix=old_suffix,
        new_prefix=new_prefix,
    )
    marks = _marks(
        context.position,
        l0_before=line_before,
        new_lines=new_lines,
        regions=parsed.regions,
    )
    return edit, marks, parsed.xforms
class ParseError(Exception): ...  # snippet text could not be parsed under its grammar
class ParseInfo:
    """Editor-side inputs available to snippet variable expansion."""

    visual: str
    clipboard: str
    comment_str: Tuple[str, str]
class ParsedSnippet:
    """A snippet definition as loaded from a source file."""

    grammar: SnippetGrammar
    filetype: str
    content: str
    label: str
    doc: str
    # Trigger words that expand this snippet.
    matches: AbstractSet[str]
class LoadedSnips:
    """Full snippet corpus: filetype-extension graph + snippets by id."""

    exts: Mapping[str, AbstractSet[str]]
    snippets: Mapping[UUID, ParsedSnippet]
async def load_parsable() -> Any:
    """Re-serialize only the snippets that survive a trial parse."""
    loaded = await load()

    def survivors() -> Iterator[Tuple[UUID, ParsedSnippet]]:
        for uid, snip in loaded.snippets.items():
            candidate = SnippetEdit(
                new_text=snip.content,
                grammar=snip.grammar,
            )
            try:
                parse_basic(
                    EMPTY_MATCH,
                    comp=EMPTY_COMP,
                    adjust_indent=False,
                    context=EMPTY_CONTEXT,
                    snippet=candidate,
                    info=ParseInfo(visual="", clipboard="", comment_str=("", "")),
                )
            except ParseError:
                continue
            yield uid, snip

    safe = LoadedSnips(exts=loaded.exts, snippets=dict(survivors()))
    coder = new_encoder[LoadedSnips](LoadedSnips)
    return recur_sort(coder(safe))
154,791 | from contextlib import closing, suppress
from os.path import normcase
from pathlib import Path, PurePath
from sqlite3 import Connection, OperationalError
from typing import AbstractSet, Iterator, Mapping, TypedDict, cast
from uuid import uuid4
from ....databases.types import DB
from ....shared.settings import MatchOptions
from ....shared.sql import BIGGEST_INT, init_db, like_esc
from ....snippets.types import LoadedSnips
from .sql import sql
# Bump _SCHEMA to force a fresh DB file when the on-disk layout changes.
_SCHEMA = "v4"
# Load the adjacent .sql files once; `sql(section, name)` yields a script.
sql = loader(Path(__file__).resolve(strict=True).parent)
def _init(db_dir: Path) -> Connection:
    """Open (creating if needed) the on-disk snippet DB and apply the schema."""
    location = (db_dir / _SCHEMA).with_suffix(".sqlite3")
    location.parent.mkdir(parents=True, exist_ok=True)
    conn = Connection(location, isolation_level=None)
    init_db(conn)
    for script in (sql("create", "pragma"), sql("create", "tables")):
        conn.executescript(script)
    return conn
154,792 | from dataclasses import dataclass, replace
from itertools import chain
from typing import (
AbstractSet,
Awaitable,
Iterable,
Iterator,
Mapping,
MutableMapping,
MutableSet,
Optional,
Tuple,
)
from uuid import UUID, uuid4
from ...shared.fuzzy import multi_set_ratio
from ...shared.parse import coalesce
from ...shared.repeat import sanitize
from ...shared.runtime import Supervisor
from ...shared.settings import MatchOptions
from ...shared.timeit import timeit
from ...shared.types import BaseRangeEdit, Completion, Context, Cursors, Interruptible
from .db.database import Database
class _CacheCtx:
    """Fingerprint of the context the completion cache was filled from."""

    change_id: UUID
    commit_id: UUID
    buf_id: int
    row: int
    col: int
    syms_before: str
def multi_set_ratio(lhs: str, rhs: str, look_ahead: int) -> float:
    """
    Test intersection size, adjust for length
    """
    shorter = min(len(lhs), len(rhs))
    if not shorter:
        # One side is empty: trivially a perfect (degenerate) match.
        return 1
    cutoff = shorter + look_ahead
    left, right = lhs[:cutoff], rhs[:cutoff]
    longer = max(len(left), len(right))
    # Multiset surplus of the longer side over the shorter side.
    bigger, smaller = (
        (Counter(left), Counter(right))
        if len(left) > len(right)
        else (Counter(right), Counter(left))
    )
    surplus = sum((bigger - smaller).values())
    ratio = 1 - surplus / longer
    return ratio / (shorter / longer)
class MatchOptions:
    """User-tunable fuzzy matching knobs."""

    # Characters that glue a "word" together besides alphanumerics.
    unifying_chars: AbstractSet[str]
    max_results: int
    look_ahead: int
    exact_matches: int
    fuzzy_cutoff: float
class Context:
    """
    Snapshot of the cursor neighbourhood a completion request is made from.

    |... line ...|
    |... line_before 🐭 line_after ...|
    |... <syms_before><words_before>🐭<words_after><syms_after> ...|
    """

    manual: bool
    # CHANGE ID <-> Triggered by NVIM, ie lines changes
    change_id: UUID
    # COMMIT ID <-> Triggered by COQ
    commit_id: UUID
    cwd: PurePath
    buf_id: int
    filetype: str
    filename: str
    line_count: int
    linefeed: Literal["\r\n", "\n", "\r"]
    tabstop: int
    expandtab: bool
    # (left, right) comment markers for the filetype
    comment: Tuple[str, str]
    position: NvimPos
    cursor: Cursors
    scr_col: int
    win_size: int
    line: str
    line_before: str
    line_after: str
    lines: Sequence[str]
    lines_before: Sequence[str]
    lines_after: Sequence[str]
    # word / symbol / whitespace slices around the cursor (see diagram)
    words: str
    words_before: str
    words_after: str
    syms: str
    syms_before: str
    syms_after: str
    ws_before: str
    ws_after: str
    # `l_` variants: presumably lower-cased forms for case-insensitive
    # matching (NOTE(review): inferred from naming + `is_lower` -- confirm)
    l_words_before: str
    l_words_after: str
    l_syms_before: str
    l_syms_after: str
    is_lower: bool
    change: Optional[ChangeEvent]
def _use_cache(match: MatchOptions, cache: _CacheCtx, ctx: Context) -> bool:
    """Decide whether the cached completion set is still valid for `ctx`."""
    row, _ = ctx.position
    if ctx.manual:
        return False
    if cache.commit_id != ctx.commit_id:
        return False
    if ctx.buf_id != cache.buf_id or row != cache.row:
        return False
    # Same commit / buffer / row: accept if the symbol prefix is similar.
    similarity = multi_set_ratio(
        ctx.syms_before, cache.syms_before, look_ahead=match.look_ahead
    )
    return similarity >= match.fuzzy_cutoff
154,793 | from dataclasses import dataclass, replace
from itertools import chain
from typing import (
AbstractSet,
Awaitable,
Iterable,
Iterator,
Mapping,
MutableMapping,
MutableSet,
Optional,
Tuple,
)
from uuid import UUID, uuid4
from ...shared.fuzzy import multi_set_ratio
from ...shared.parse import coalesce
from ...shared.repeat import sanitize
from ...shared.runtime import Supervisor
from ...shared.settings import MatchOptions
from ...shared.timeit import timeit
from ...shared.types import BaseRangeEdit, Completion, Context, Cursors, Interruptible
from .db.database import Database
def _overlap(row: int, edit: BaseRangeEdit) -> bool:
    # An edit collides with `row` iff it begins or ends on that line.
    (begin_row, _), (end_row, _) = edit.begin, edit.end
    return row in (begin_row, end_row)
def sanitize(cursor: Cursors, edit: Edit) -> Optional[Edit]:
    """
    Downgrade a (possibly range-based) edit so it can be re-applied at the
    current cursor, e.g. when repeating a completion.

    Returns None when no safe downgrade exists.
    """
    row, *_ = cursor
    if isinstance(edit, SnippetRangeEdit):
        if row == -1:
            # Unknown cursor row: only text-level fallbacks are possible.
            if edit.fallback == edit.new_text:
                return SnippetEdit(grammar=edit.grammar, new_text=edit.new_text)
            elif not requires_snip(edit.new_text):
                return Edit(new_text=edit.new_text)
            else:
                return None
        elif fallback := edit.fallback:
            return SnippetEdit(grammar=edit.grammar, new_text=fallback)
        elif not requires_snip(edit.new_text):
            return Edit(new_text=edit.new_text)
        else:
            # Keep the range edit, shifted onto the current cursor.
            begin, end = _shift(cursor, edit=edit)
            return replace(edit, begin=begin, end=end)
    elif isinstance(edit, RangeEdit):
        if fallback := edit.fallback:
            return Edit(new_text=fallback)
        elif not requires_snip(edit.new_text):
            return Edit(new_text=edit.new_text)
        else:
            return None
    elif isinstance(edit, SnippetEdit):
        return edit
    else:
        # Plain edits are always safe as-is.
        return Edit(new_text=edit.new_text)
# Cursor position in several encodings at once
# NOTE(review): exact column encodings presumed from the alias names -- confirm
Cursors = Tuple[int, NvimCursor, WTF8Cursor, WTF8Cursor]


class Completion:
    """A single candidate, normalized across all completion sources."""

    source: str
    always_on_top: bool
    weight_adjust: float
    label: str
    sort_by: str
    primary_edit: Edit
    adjust_indent: bool
    icon_match: Optional[str]
    uid: UUID = field(default_factory=uuid4)
    secondary_edits: Sequence[RangeEdit] = ()
    preselect: bool = False
    kind: str = ""
    doc: Optional[Doc] = None
    extern: Union[ExternLSP, ExternLUA, ExternPath, None] = None
def sanitize_cached(
    cursor: Cursors, comp: Completion, sort_by: Optional[str]
) -> Optional[Completion]:
    """Re-validate a cached completion against the current cursor."""
    edit = sanitize(cursor, edit=comp.primary_edit)
    if not edit:
        return None
    row, *_ = cursor
    # Drop secondary edits that would collide with the cursor row.
    survivors = tuple(
        se for se in comp.secondary_edits if not _overlap(row, edit=se)
    )
    return replace(
        comp,
        primary_edit=edit,
        secondary_edits=survivors,
        sort_by=sort_by or comp.sort_by,
    )
154,794 | from contextlib import closing, suppress
from sqlite3 import Connection, OperationalError
from typing import Iterable, Iterator, Mapping, Tuple
from ....databases.types import DB
from ....shared.settings import MatchOptions
from ....shared.sql import BIGGEST_INT, init_db, like_esc
from .sql import sql
# Load the adjacent .sql files once; `sql(section, name)` yields a script.
# NOTE(review): `loader` / `Path` are imported elsewhere in the original module.
sql = loader(Path(__file__).resolve(strict=True).parent)
def _init() -> Connection:
    """Create a throwaway in-memory DB with the schema applied."""
    conn = Connection(":memory:", isolation_level=None)
    init_db(conn)
    for script in (sql("create", "pragma"), sql("create", "tables")):
        conn.executescript(script)
    return conn
154,795 | from asyncio import Lock
from os import linesep
from pathlib import Path
from typing import AsyncIterator, Iterator
from pynvim_pp.logging import suppress_and_log
from ...shared.executor import AsyncExecutor
from ...shared.runtime import Supervisor
from ...shared.runtime import Worker as BaseWorker
from ...shared.settings import TmuxClient
from ...shared.timeit import timeit
from ...shared.types import Completion, Context, Doc, Edit
from ...tmux.parse import snapshot
from .db.database import TMDB, TmuxWord
class TmuxClient(_WordbankClient, TagsClient, _AlwaysTop):
    """tmux word-bank source configuration."""

    # Scrape panes from every session, not just the attached one.
    all_sessions: bool
class Doc:
    """Preview-window content plus its highlight syntax."""

    text: str
    syntax: str
class TmuxWord:
    """A scraped word plus the tmux pane it came from."""

    text: str
    session_name: str
    window_index: int
    window_name: str
    pane_index: int
    pane_title: str
def _doc(client: TmuxClient, word: TmuxWord) -> Doc:
    """Render the pane provenance of a tmux word as preview text."""
    parts = []
    if client.all_sessions:
        # Session line only matters when scraping beyond the current session.
        parts.append(f"S: {word.session_name}{client.parent_scope}")
    parts.append(
        f"W: #{word.window_index}{client.path_sep}{word.window_name}{client.parent_scope}"
    )
    parts.append(f"P: #{word.pane_index}{client.path_sep}{word.pane_title}")
    return Doc(text=linesep.join(parts), syntax="")
154,796 | from contextlib import closing, suppress
from dataclasses import dataclass
from sqlite3 import Connection, OperationalError
from typing import AbstractSet, Iterator, Mapping, MutableMapping, Optional
from ....consts import TMUX_DB
from ....databases.types import DB
from ....shared.parse import tokenize
from ....shared.settings import MatchOptions
from ....shared.sql import BIGGEST_INT, init_db, like_esc
from ....tmux.parse import Pane
from .sql import sql
# Load the adjacent .sql files once; `sql(section, name)` yields a script.
# NOTE(review): `loader` / `Path` are imported elsewhere in the original module.
sql = loader(Path(__file__).resolve(strict=True).parent)
def _init() -> Connection:
    """Open the shared tmux-words DB and apply the schema."""
    conn = Connection(TMUX_DB, isolation_level=None)
    init_db(conn)
    for script in (sql("create", "pragma"), sql("create", "tables")):
        conn.executescript(script)
    return conn
154,797 | import sys
from asyncio import (
IncompleteReadError,
LimitOverrunError,
StreamReader,
create_subprocess_exec,
gather,
shield,
sleep,
)
from asyncio.locks import Lock
from asyncio.subprocess import Process
from contextlib import suppress
from itertools import chain, count
from json import dumps, loads
from json.decoder import JSONDecodeError
from pathlib import PurePath
from subprocess import DEVNULL, PIPE
from typing import Any, AsyncIterator, Iterator, Mapping, Optional, Sequence, cast
from pynvim_pp.lib import decode, encode
from pynvim_pp.logging import log, suppress_and_log
from pynvim_pp.nvim import Nvim
from std2.pickle.decoder import new_decoder
from std2.pickle.encoder import new_encoder
from std2.pickle.types import DecodeError
from std2.platform import OS, os
from ...consts import DEBUG
from ...lang import LANG
from ...lsp.protocol import LSProtocol, protocol
from ...shared.executor import AsyncExecutor, very_nice
from ...shared.runtime import Supervisor
from ...shared.runtime import Worker as BaseWorker
from ...shared.settings import T9Client
from ...shared.types import Completion, Context, ContextualEdit, Doc
from .install import ensure_updated, t9_bin, x_ok
from .types import ReqL1, ReqL2, Request, RespL1, Response
# Protocol version sent with every TabNine request.
_VERSION = "4.5.10"
# Serializes `Request` dataclasses into JSON-encodable primitives.
_ENCODER = new_encoder[Request](Request)
class Context:
    """
    Snapshot of the cursor neighbourhood a completion request is made from.

    |... line ...|
    |... line_before 🐭 line_after ...|
    |... <syms_before><words_before>🐭<words_after><syms_after> ...|
    """

    manual: bool
    # CHANGE ID <-> Triggered by NVIM, ie lines changes
    change_id: UUID
    # COMMIT ID <-> Triggered by COQ
    commit_id: UUID
    cwd: PurePath
    buf_id: int
    filetype: str
    filename: str
    line_count: int
    linefeed: Literal["\r\n", "\n", "\r"]
    tabstop: int
    expandtab: bool
    # (left, right) comment markers for the filetype
    comment: Tuple[str, str]
    position: NvimPos
    cursor: Cursors
    scr_col: int
    win_size: int
    line: str
    line_before: str
    line_after: str
    lines: Sequence[str]
    lines_before: Sequence[str]
    lines_after: Sequence[str]
    # word / symbol / whitespace slices around the cursor (see diagram)
    words: str
    words_before: str
    words_after: str
    syms: str
    syms_before: str
    syms_after: str
    ws_before: str
    ws_after: str
    # `l_` variants: presumably lower-cased forms for case-insensitive
    # matching (NOTE(review): inferred from naming + `is_lower` -- confirm)
    l_words_before: str
    l_words_after: str
    l_syms_before: str
    l_syms_after: str
    is_lower: bool
    change: Optional[ChangeEvent]
class ReqL2:
    """TabNine `Autocomplete` request body."""

    correlation_id: int
    before: str
    after: str
    filename: str
    region_includes_beginning: bool
    region_includes_end: bool
    # None -> let TabNine decide (used for manual invocation)
    max_num_results: Optional[int] = None
class ReqL1:
    """Request discriminator: the only verb used is `Autocomplete`."""

    Autocomplete: ReqL2
class Request:
    """Top-level TabNine wire request."""

    request: ReqL1
    version: str
def _encode(context: Context, id: int, limit: int) -> Any:
    """Build the JSON-encodable TabNine autocomplete request for `context`."""
    row, _ = context.position
    # Full buffer text split at the cursor.
    before = context.linefeed.join(chain(context.lines_before, (context.line_before,)))
    after = context.linefeed.join(chain((context.line_after,), context.lines_after))
    req = Request(
        request=ReqL1(
            Autocomplete=ReqL2(
                correlation_id=id,
                filename=context.filename,
                before=before,
                after=after,
                region_includes_beginning=row - context.win_size <= 0,
                region_includes_end=row + context.win_size >= context.line_count,
                max_num_results=None if context.manual else limit,
            )
        ),
        version=_VERSION,
    )
    return _ENCODER(req)
154,798 | import sys
from asyncio import (
IncompleteReadError,
LimitOverrunError,
StreamReader,
create_subprocess_exec,
gather,
shield,
sleep,
)
from asyncio.locks import Lock
from asyncio.subprocess import Process
from contextlib import suppress
from itertools import chain, count
from json import dumps, loads
from json.decoder import JSONDecodeError
from pathlib import PurePath
from subprocess import DEVNULL, PIPE
from typing import Any, AsyncIterator, Iterator, Mapping, Optional, Sequence, cast
from pynvim_pp.lib import decode, encode
from pynvim_pp.logging import log, suppress_and_log
from pynvim_pp.nvim import Nvim
from std2.pickle.decoder import new_decoder
from std2.pickle.encoder import new_encoder
from std2.pickle.types import DecodeError
from std2.platform import OS, os
from ...consts import DEBUG
from ...lang import LANG
from ...lsp.protocol import LSProtocol, protocol
from ...shared.executor import AsyncExecutor, very_nice
from ...shared.runtime import Supervisor
from ...shared.runtime import Worker as BaseWorker
from ...shared.settings import T9Client
from ...shared.types import Completion, Context, ContextualEdit, Doc
from .install import ensure_updated, t9_bin, x_ok
from .types import ReqL1, ReqL2, Request, RespL1, Response
# Lenient decoder: extra keys in TabNine results are tolerated (strict=False).
_DECODER = new_decoder[RespL1](RespL1, strict=False)
class LSProtocol:
    """LSP enum tables mapped from int codes to display names."""

    CompletionItemKind: Mapping[Optional[int], str]
    InsertTextFormat: Mapping[Optional[int], str]
class T9Client(BaseClient, _AlwaysTop): ...  # TabNine source config; fields inherited
class ContextualEdit(Edit):
    """
    An edit described relative to the cursor:

    <new_prefix>🐭<new_suffix>
    """

    # Existing text before/after the cursor that gets replaced.
    old_prefix: str
    new_prefix: str
    old_suffix: str = ""
class Doc:
    """Preview-window content plus its highlight syntax."""

    text: str
    syntax: str
class Completion:
    """A single candidate, normalized across all completion sources."""

    source: str
    always_on_top: bool
    weight_adjust: float
    label: str
    sort_by: str
    primary_edit: Edit
    adjust_indent: bool
    icon_match: Optional[str]
    uid: UUID = field(default_factory=uuid4)
    secondary_edits: Sequence[RangeEdit] = ()
    preselect: bool = False
    kind: str = ""
    doc: Optional[Doc] = None
    extern: Union[ExternLSP, ExternLUA, ExternPath, None] = None
class Response(TypedDict):
    """Shape of one TabNine autocomplete reply."""

    correlation_id: int
    old_prefix: str
    is_locked: bool
    user_message: Sequence[str]
    results: Sequence[Any]
def _decode(
    protocol: LSProtocol,
    client: T9Client,
    ellipsis: str,
    syntax: str,
    id: int,
    reply: Response,
) -> Iterator[Completion]:
    """
    Translate one TabNine autocomplete reply into coq `Completion`s.

    Replies with a mismatched correlation id or malformed shape are logged
    and dropped wholesale; individual undecodable results are skipped.
    """
    if (
        not isinstance(reply, Mapping)
        or ((r_id := reply.get("correlation_id")) and r_id != id)
        or not isinstance((old_prefix := reply.get("old_prefix")), str)
        or not isinstance((results := reply.get("results")), Sequence)
    ):
        log.warn("%s", reply)
    else:
        for result in results:
            try:
                resp = _DECODER(result)
            except DecodeError as e:
                log.warn("%s", e)
            else:
                new_text = resp.new_prefix + resp.new_suffix
                edit = ContextualEdit(
                    old_prefix=old_prefix,
                    new_prefix=resp.new_prefix,
                    old_suffix=resp.old_suffix,
                    new_text=new_text,
                )
                # Collapse multi-line prefix/suffix into a one-line label,
                # marking elided lines with `ellipsis`.
                pre_lines = resp.new_prefix.splitlines() or ("",)
                post_lines = resp.new_suffix.splitlines() or ("",)
                label_pre, *pre = pre_lines
                label_post, *post = post_lines
                e_pre = ellipsis if pre else ""
                e_post = ellipsis if post else ""
                label = label_pre + e_pre + label_post + e_post
                # Sort key: the lines immediately adjacent to the cursor.
                *_, s_pre = pre_lines
                s_post, *_ = post_lines
                sort_by = s_pre + s_post
                # Show a doc preview only when the label was truncated.
                doc = Doc(text=new_text, syntax=syntax) if e_pre or e_post else None
                kind = protocol.CompletionItemKind.get(resp.kind)
                cmp = Completion(
                    source=client.short_name,
                    always_on_top=client.always_on_top,
                    weight_adjust=client.weight_adjust,
                    label=label,
                    sort_by=sort_by,
                    primary_edit=edit,
                    adjust_indent=False,
                    kind=kind or "",
                    icon_match=kind,
                    doc=doc,
                )
                yield cmp
154,799 | import sys
from asyncio import (
IncompleteReadError,
LimitOverrunError,
StreamReader,
create_subprocess_exec,
gather,
shield,
sleep,
)
from asyncio.locks import Lock
from asyncio.subprocess import Process
from contextlib import suppress
from itertools import chain, count
from json import dumps, loads
from json.decoder import JSONDecodeError
from pathlib import PurePath
from subprocess import DEVNULL, PIPE
from typing import Any, AsyncIterator, Iterator, Mapping, Optional, Sequence, cast
from pynvim_pp.lib import decode, encode
from pynvim_pp.logging import log, suppress_and_log
from pynvim_pp.nvim import Nvim
from std2.pickle.decoder import new_decoder
from std2.pickle.encoder import new_encoder
from std2.pickle.types import DecodeError
from std2.platform import OS, os
from ...consts import DEBUG
from ...lang import LANG
from ...lsp.protocol import LSProtocol, protocol
from ...shared.executor import AsyncExecutor, very_nice
from ...shared.runtime import Supervisor
from ...shared.runtime import Worker as BaseWorker
from ...shared.settings import T9Client
from ...shared.types import Completion, Context, ContextualEdit, Doc
from .install import ensure_updated, t9_bin, x_ok
from .types import ReqL1, ReqL2, Request, RespL1, Response
def _nice() -> None:
with suppress(PermissionError):
nice(19)
# Debug mode is driven purely by the presence of the COQ_DEBUG env var
# (its value is irrelevant).
DEBUG = "COQ_DEBUG" in environ
async def very_nice() -> Sequence[str]:
    """Bridge the concurrent Future from `_very_nice` into asyncio and await it."""
    return await wrap_future(_very_nice())
async def _proc(bin: PurePath, cwd: PurePath) -> Optional[Process]:
    """
    Spawn the T9 binary as a low-priority child process.

    Returns the Process handle, or None when the executable is missing.
    """
    # `creationflags` is Windows-only: subprocess raises ValueError for a
    # non-zero value on POSIX, so choose exactly one priority mechanism per OS
    # instead of passing it unconditionally.
    if os is OS.windows:
        kwargs: Mapping[str, Any] = {"creationflags": BELOW_NORMAL_PRIORITY_CLASS}
    else:
        kwargs = {"preexec_fn": _nice}
    prefix = await very_nice()
    # Renamed from `log` so the imported logger is not shadowed.
    log_arg = (f"--log-file-path={cwd / 't9.log'}",) if DEBUG else ()
    try:
        proc = await create_subprocess_exec(
            *prefix,
            bin,
            "--client=coq.nvim",
            *log_arg,
            stdin=PIPE,
            stdout=PIPE,
            stderr=DEVNULL,
            cwd=cwd,
            **kwargs,  # type: ignore
        )
    except FileNotFoundError:
        return None
    else:
        return proc
154,800 | import sys
from asyncio import (
IncompleteReadError,
LimitOverrunError,
StreamReader,
create_subprocess_exec,
gather,
shield,
sleep,
)
from asyncio.locks import Lock
from asyncio.subprocess import Process
from contextlib import suppress
from itertools import chain, count
from json import dumps, loads
from json.decoder import JSONDecodeError
from pathlib import PurePath
from subprocess import DEVNULL, PIPE
from typing import Any, AsyncIterator, Iterator, Mapping, Optional, Sequence, cast
from pynvim_pp.lib import decode, encode
from pynvim_pp.logging import log, suppress_and_log
from pynvim_pp.nvim import Nvim
from std2.pickle.decoder import new_decoder
from std2.pickle.encoder import new_encoder
from std2.pickle.types import DecodeError
from std2.platform import OS, os
from ...consts import DEBUG
from ...lang import LANG
from ...lsp.protocol import LSProtocol, protocol
from ...shared.executor import AsyncExecutor, very_nice
from ...shared.runtime import Supervisor
from ...shared.runtime import Worker as BaseWorker
from ...shared.settings import T9Client
from ...shared.types import Completion, Context, ContextualEdit, Doc
from .install import ensure_updated, t9_bin, x_ok
from .types import ReqL1, ReqL2, Request, RespL1, Response
_NL = b"\n"
async def _readline(stdout: StreamReader) -> bytes:
acc = bytearray()
while True:
try:
b = await stdout.readuntil(_NL)
except LimitOverrunError as e:
c = await stdout.readexactly(e.consumed)
acc.extend(c)
else:
acc.extend(b)
break
return acc | null |
154,801 | from asyncio import sleep
from contextlib import suppress
from io import BytesIO
from os import X_OK, access, sep
from os.path import normpath
from pathlib import Path, PurePath
from platform import uname
from shutil import move
from socket import timeout as TimeoutE
from string import Template
from tempfile import TemporaryDirectory
from typing import Callable, Mapping, Optional, Tuple
from urllib.error import URLError
from zipfile import ZipFile
from pynvim_pp.lib import decode
from pynvim_pp.logging import log
from std2.asyncio import to_thread
from std2.platform import OS, os
from std2.urllib import urlopen
def t9_bin(vars_dir: Path) -> Path:
    """Location of the T9 executable (`_T9_EXEC`) inside `vars_dir`."""
    return vars_dir / _T9_EXEC
def _update(vars_dir: Path, timeout: float) -> bool:
    """
    Download/refresh the binary under `vars_dir` when the upstream URI differs
    from the one recorded on disk.

    Returns False when no download URI is available (updating effectively
    disabled), True otherwise.
    """
    vars_dir.mkdir(parents=True, exist_ok=True)
    # The lock file records the URI of the last installed version.
    lock = vars_dir / "versions.lock"
    try:
        p_uri = lock.read_text()
    except FileNotFoundError:
        p_uri = ""
    uri = _uri(timeout)  # NOTE(review): presumably resolves the download URL — confirm
    if not uri:
        return False
    else:
        # Re-download when the binary is missing/not executable, or outdated.
        if not x_ok(vars_dir) or uri != p_uri:
            with urlopen(uri, timeout=timeout) as resp:
                buf = BytesIO(resp.read())
            # Extract into a temp dir on the same filesystem, then move files
            # into place so a partial extraction never clobbers the install.
            with TemporaryDirectory(dir=vars_dir) as tmp:
                with ZipFile(buf) as zip:
                    zip.extractall(path=tmp)
                for child in Path(tmp).iterdir():
                    child.chmod(_X_MOD)
                    move(normpath(child), vars_dir / child.name)
            lock.write_text(uri)
            lock.chmod(_X_MOD)
        return True
async def ensure_updated(
    vars_dir: Path, retries: int, timeout: float
) -> Optional[PurePath]:
    """
    Try up to `retries` times to download/refresh the T9 binary.

    Returns the executable's path on success; None when updating is disabled
    or the binary still is not executable. Network errors are retried after
    sleeping for `timeout` seconds.
    """
    bin = t9_bin(vars_dir)
    for _ in range(retries):
        try:
            cont = await to_thread(_update, vars_dir=vars_dir, timeout=timeout)
        except (URLError, TimeoutE) as e:
            # `Logger.warn` is a deprecated alias — use `warning`.
            log.warning("%s", e)
            await sleep(timeout)
        else:
            if not cont:
                return None
            elif access(bin, X_OK):
                return bin
            else:
                return None
    # All retries exhausted without a successful update.
    return None
154,802 | from contextlib import suppress
from os import linesep
from os.path import normcase
from pathlib import Path, PurePath
from string import capwords
from typing import (
AbstractSet,
AsyncIterator,
Iterable,
Iterator,
Mapping,
MutableSet,
Tuple,
)
from pynvim_pp.atomic import Atomic
from pynvim_pp.buffer import Buffer
from pynvim_pp.logging import suppress_and_log
from pynvim_pp.rpc_types import NvimError
from std2.asyncio import to_thread
from ...paths.show import fmt_path
from ...shared.executor import AsyncExecutor
from ...shared.runtime import Supervisor
from ...shared.runtime import Worker as BaseWorker
from ...shared.settings import TagsClient
from ...shared.timeit import timeit
from ...shared.types import Completion, Context, Doc, Edit
from ...tags.parse import parse, run
from ...tags.types import Tag
from .db.database import CTDB
async def _ls() -> AbstractSet[str]:
    """Names of all listed buffers; empty set when the RPC fails."""
    try:
        listed = await Buffer.list(listed=True)
        batch = Atomic()
        for b in listed:
            batch.buf_get_name(b)
        names = await batch.commit(str)
    except NvimError:
        return set()
    return set(names)
154,803 | from contextlib import suppress
from os import linesep
from os.path import normcase
from pathlib import Path, PurePath
from string import capwords
from typing import (
AbstractSet,
AsyncIterator,
Iterable,
Iterator,
Mapping,
MutableSet,
Tuple,
)
from pynvim_pp.atomic import Atomic
from pynvim_pp.buffer import Buffer
from pynvim_pp.logging import suppress_and_log
from pynvim_pp.rpc_types import NvimError
from std2.asyncio import to_thread
from ...paths.show import fmt_path
from ...shared.executor import AsyncExecutor
from ...shared.runtime import Supervisor
from ...shared.runtime import Worker as BaseWorker
from ...shared.settings import TagsClient
from ...shared.timeit import timeit
from ...shared.types import Completion, Context, Doc, Edit
from ...tags.parse import parse, run
from ...tags.types import Tag
from .db.database import CTDB
async def _mtimes(paths: AbstractSet[str]) -> Mapping[str, float]:
    """
    Stat every path off the event loop.

    Returns {normcased path: mtime}; paths that cannot be stat'ed (missing,
    permission errors, ...) are silently skipped.
    """

    # Plain def instead of an assigned lambda (PEP 8 E731); single pass, no
    # intermediate generator stage needed.
    def cont() -> Mapping[str, float]:
        acc = {}
        for path in map(Path, paths):
            with suppress(OSError):
                acc[normcase(path)] = path.stat().st_mtime
        return acc

    return await to_thread(cont)
154,804 | from contextlib import suppress
from os import linesep
from os.path import normcase
from pathlib import Path, PurePath
from string import capwords
from typing import (
AbstractSet,
AsyncIterator,
Iterable,
Iterator,
Mapping,
MutableSet,
Tuple,
)
from pynvim_pp.atomic import Atomic
from pynvim_pp.buffer import Buffer
from pynvim_pp.logging import suppress_and_log
from pynvim_pp.rpc_types import NvimError
from std2.asyncio import to_thread
from ...paths.show import fmt_path
from ...shared.executor import AsyncExecutor
from ...shared.runtime import Supervisor
from ...shared.runtime import Worker as BaseWorker
from ...shared.settings import TagsClient
from ...shared.timeit import timeit
from ...shared.types import Completion, Context, Doc, Edit
from ...tags.parse import parse, run
from ...tags.types import Tag
from .db.database import CTDB
def fmt_path(
    cwd: PurePath, path: PurePath, is_dir: bool, current: Optional[PurePath] = None
) -> str:
    """
    Compact display form of `path`: "." when it equals `current`, else
    relative to `cwd`, else relative to the home directory, else as-is.
    Directories get a trailing separator.
    """
    if path == current:
        return curdir
    tail = sep if is_dir else ""
    # relative_to raises ValueError when `path` lies outside the base.
    try:
        return f"{curdir}{sep}{normcase(path.relative_to(cwd))}{tail}"
    except ValueError:
        pass
    try:
        return f"~{sep}{normcase(path.relative_to(_HOME))}{tail}"
    except ValueError:
        return f"{normcase(path)}{tail}"
class TagsClient(BaseClient, _AlwaysTop):
    # Suffix appended after a scope in rendered docs (see `_doc`).
    parent_scope: str
    # Separator joining scope-kind / scope / typeref segments in docs.
    path_sep: str
class Context:
    """
    Snapshot of editor state handed to completion workers.

    |... line ...|
    |... line_before 🐭 line_after ...|
    |... <syms_before><words_before>🐭<words_after><syms_after> ...|

    (🐭 marks the cursor position in the diagram above.)
    """

    # Presumably True when completion was invoked explicitly — TODO confirm.
    manual: bool
    # CHANGE ID <-> Triggered by NVIM, ie lines changes
    change_id: UUID
    # COMMIT ID <-> Triggered by COQ
    commit_id: UUID
    # --- buffer / file identity ---
    cwd: PurePath
    buf_id: int
    filetype: str
    filename: str
    line_count: int
    linefeed: Literal["\r\n", "\n", "\r"]
    tabstop: int
    expandtab: bool
    # (left, right) comment markers for the buffer's filetype.
    comment: Tuple[str, str]
    # --- cursor / window geometry ---
    position: NvimPos
    cursor: Cursors
    scr_col: int
    win_size: int
    # --- text surrounding the cursor (see diagram above) ---
    line: str
    line_before: str
    line_after: str
    lines: Sequence[str]
    lines_before: Sequence[str]
    lines_after: Sequence[str]
    words: str
    words_before: str
    words_after: str
    syms: str
    syms_before: str
    syms_after: str
    ws_before: str
    ws_after: str
    # l_* appear to be lower-cased variants of the above — TODO confirm.
    l_words_before: str
    l_words_after: str
    l_syms_before: str
    l_syms_after: str
    is_lower: bool
    change: Optional[ChangeEvent]
class Doc:
    # Raw documentation text to display in the preview.
    text: str
    # Filetype/syntax used to highlight `text` (may be "").
    syntax: str
class Tag(TypedDict):
    """One parsed tag entry; fields mirror ctags output — TODO confirm parser."""

    language: str
    path: str
    line: int
    name: str
    # Search pattern locating the tag, when available.
    pattern: Optional[str]
    kind: str
    # Colon-separated type reference; only the part after ":" is shown (see `_doc`).
    typeref: Optional[str]
    scope: Optional[str]
    scopeKind: Optional[str]
    access: Optional[str]
def _doc(client: TagsClient, context: Context, tag: Tag) -> Doc:
    """
    Render a preview Doc for a tag: a `<path>:<line>` header, optional scope
    and access/kind/typeref lines (each wrapped in the filetype's comment
    markers), and finally the tag's pattern (or name) as the body.
    """

    def cont() -> Iterator[str]:
        # Everything except the final pattern line is wrapped in comments.
        lc, rc = context.comment
        path = PurePath(tag["path"])
        pos = fmt_path(
            context.cwd, path=path, is_dir=False, current=PurePath(context.filename)
        )
        # `<path>:<line>` header.
        yield lc
        yield pos
        yield ":"
        yield str(tag["line"])
        yield rc
        yield linesep
        # Normalize empty strings to None so the branches below read cleanly.
        scope_kind = tag["scopeKind"] or None
        scope = tag["scope"] or None
        # Scope line: emit whichever of scope-kind / scope are present.
        if scope_kind and scope:
            yield lc
            yield scope_kind
            yield client.path_sep
            yield scope
            yield client.parent_scope
            yield rc
            yield linesep
        elif scope_kind:
            yield lc
            yield scope_kind
            yield client.parent_scope
            yield rc
            yield linesep
        elif scope:
            yield lc
            yield scope
            yield client.parent_scope
            yield rc
            yield linesep
        access = tag["access"] or None
        # typeref looks like "<prefix>:<ref>"; keep only the part after ":".
        _, _, ref = (tag.get("typeref") or "").partition(":")
        # Access / kind / typeref line, mirroring the scope logic above.
        if access and ref:
            yield lc
            yield access
            yield client.path_sep
            yield tag["kind"]
            yield client.path_sep
            yield ref
            yield rc
            yield linesep
        elif access:
            yield lc
            yield access
            yield client.path_sep
            yield tag["kind"]
            yield rc
            yield linesep
        elif ref:
            yield lc
            yield tag["kind"]
            yield client.path_sep
            yield ref
            yield rc
            yield linesep
        # Body: the tag's search pattern, falling back to its name.
        yield tag["pattern"] or tag["name"]

    doc = Doc(
        text="".join(cont()),
        syntax=context.filetype,
    )
    return doc
154,805 | from contextlib import closing, suppress
from hashlib import md5
from os.path import normcase
from pathlib import Path, PurePath
from sqlite3 import Connection, OperationalError
from typing import AbstractSet, Iterator, Mapping, cast
from pynvim_pp.lib import encode
from ....databases.types import DB
from ....shared.settings import MatchOptions
from ....shared.sql import BIGGEST_INT, init_db, like_esc
from ....tags.types import Tag, Tags
from .sql import sql
_SCHEMA = "v5"

sql = loader(Path(__file__).resolve(strict=True).parent)


def _init(db_dir: Path, cwd: PurePath) -> Connection:
    """Open (creating if needed) the per-cwd tags DB and apply the schema."""
    digest = md5(encode(normcase(cwd))).hexdigest()
    db = (db_dir / f"{digest}-{_SCHEMA}").with_suffix(".sqlite3")
    db.parent.mkdir(parents=True, exist_ok=True)
    conn = Connection(str(db), isolation_level=None)
    init_db(conn)
    for ddl in ("pragma", "tables"):
        conn.executescript(sql("create", ddl))
    return conn
154,806 | from asyncio import as_completed
from contextlib import suppress
from itertools import chain, islice
from os import environ, scandir
from os.path import altsep, curdir, expanduser, expandvars, normpath, pardir, sep, split
from pathlib import Path
from string import ascii_letters, digits
from typing import (
AbstractSet,
AsyncIterator,
Iterator,
MutableSequence,
MutableSet,
Tuple,
)
from std2.asyncio import to_thread
from std2.platform import OS, os
from std2.string import removesuffix
from ...shared.context import cword_before
from ...shared.executor import AsyncExecutor
from ...shared.fuzzy import quick_ratio
from ...shared.parse import lower
from ...shared.runtime import Supervisor
from ...shared.runtime import Worker as BaseWorker
from ...shared.settings import PathResolution, PathsClient
from ...shared.sql import BIGGEST_INT
from ...shared.types import Completion, Context, Edit, ExternPath
def parse(
    seps: AbstractSet[str],
    look_ahead: int,
    fuzzy_cutoff: float,
    base: Path,
    line: str,
) -> Iterator[Tuple[Path, bool, str]]:
    """
    Scan path-like segments of `line` and yield filesystem completion
    candidates as (path, is_dir, replacement line text).

    Stops after the first segment that resolves to an existing directory
    (either fully, or via its parent for fuzzy tail matching).
    """
    for segment, s0 in _iter_segs(seps, line=line):
        local_sep = _p_sep(s0)
        # Skip segments consisting of nothing but separator characters.
        if not {*s0}.issubset({sep, altsep}):
            p = Path(s0)
            entire = p if p.is_absolute() else base / p
            # OSError from stat/scandir (permissions, races) aborts quietly.
            with suppress(OSError):
                if entire.is_dir():
                    # The whole segment is an existing dir -> list its children.
                    for path in scandir(entire):
                        is_dir = path.is_dir()
                        term = local_sep if is_dir else ""
                        # NOTE: intentionally rebinds `line` with the
                        # replacement text that gets yielded.
                        line = _join(local_sep, lhs=segment, rhs=path.name) + term
                        yield Path(path.path), is_dir, line
                    return
                else:
                    # Otherwise treat the trailing component as a fuzzy query
                    # against the parent directory's entries.
                    lft, go, rhs = s0.rpartition(local_sep)
                    if go:
                        lp, sp, _ = segment.rpartition(local_sep)
                        lseg = lp + sp
                        lhs = lft + go
                        p = Path(lhs)
                        left = p if p.is_absolute() else base / p
                        if left.is_dir():
                            for path in scandir(left):
                                ratio = quick_ratio(
                                    lower(rhs),
                                    lower(path.name),
                                    look_ahead=look_ahead,
                                )
                                # Keep candidates that fuzz-match the typed
                                # tail but are not already fully typed.
                                if (
                                    ratio >= fuzzy_cutoff
                                    and len(path.name) + look_ahead >= len(rhs)
                                    and not rhs.startswith(path.name)
                                ):
                                    is_dir = path.is_dir()
                                    term = local_sep if is_dir else ""
                                    line = (
                                        _join(local_sep, lhs=lseg, rhs=path.name) + term
                                    )
                                    yield Path(path.path), is_dir, line
                            return
async def _parse(
    base: Path,
    line: str,
    seps: AbstractSet[str],
    limit: int,
    look_ahead: int,
    fuzzy_cutoff: float,
) -> AbstractSet[Tuple[Path, bool, str]]:
    """Run `parse` in a worker thread, keeping at most `limit` candidates."""

    def collect() -> AbstractSet[Tuple[Path, bool, str]]:
        found = parse(
            seps,
            look_ahead=look_ahead,
            fuzzy_cutoff=fuzzy_cutoff,
            base=base,
            line=line,
        )
        return set(islice(found, limit))

    return await to_thread(collect)
154,807 | from asyncio import as_completed
from contextlib import suppress
from itertools import chain, islice
from os import environ, scandir
from os.path import altsep, curdir, expanduser, expandvars, normpath, pardir, sep, split
from pathlib import Path
from string import ascii_letters, digits
from typing import (
AbstractSet,
AsyncIterator,
Iterator,
MutableSequence,
MutableSet,
Tuple,
)
from std2.asyncio import to_thread
from std2.platform import OS, os
from std2.string import removesuffix
from ...shared.context import cword_before
from ...shared.executor import AsyncExecutor
from ...shared.fuzzy import quick_ratio
from ...shared.parse import lower
from ...shared.runtime import Supervisor
from ...shared.runtime import Worker as BaseWorker
from ...shared.settings import PathResolution, PathsClient
from ...shared.sql import BIGGEST_INT
from ...shared.types import Completion, Context, Edit, ExternPath
def cword_before(
    unifying_chars: AbstractSet[str], lower: bool, context: Context, sort_by: str
) -> str:
    """
    Select which "text before the cursor" slice a candidate should be
    compared against — whitespace, word chars, or symbols — based on the
    first character of its sort key. `lower` picks the case-folded slice.
    """
    head = sort_by[:1]
    if head.isspace():
        return context.ws_before
    if is_word(unifying_chars, chr=head):
        return context.l_words_before if lower else context.words_before
    return context.l_syms_before if lower else context.syms_before
def lower(text: str) -> str:
    """Aggressively lower-case via Unicode case folding (e.g. "ß" -> "ss")."""
    return str.casefold(text)
class Context:
    """
    Snapshot of editor state handed to completion workers.

    |... line ...|
    |... line_before 🐭 line_after ...|
    |... <syms_before><words_before>🐭<words_after><syms_after> ...|

    (🐭 marks the cursor position in the diagram above.)
    """

    # Presumably True when completion was invoked explicitly — TODO confirm.
    manual: bool
    # CHANGE ID <-> Triggered by NVIM, ie lines changes
    change_id: UUID
    # COMMIT ID <-> Triggered by COQ
    commit_id: UUID
    # --- buffer / file identity ---
    cwd: PurePath
    buf_id: int
    filetype: str
    filename: str
    line_count: int
    linefeed: Literal["\r\n", "\n", "\r"]
    tabstop: int
    expandtab: bool
    # (left, right) comment markers for the buffer's filetype.
    comment: Tuple[str, str]
    # --- cursor / window geometry ---
    position: NvimPos
    cursor: Cursors
    scr_col: int
    win_size: int
    # --- text surrounding the cursor (see diagram above) ---
    line: str
    line_before: str
    line_after: str
    lines: Sequence[str]
    lines_before: Sequence[str]
    lines_after: Sequence[str]
    words: str
    words_before: str
    words_after: str
    syms: str
    syms_before: str
    syms_after: str
    ws_before: str
    ws_after: str
    # l_* appear to be lower-cased variants of the above — TODO confirm.
    l_words_before: str
    l_words_after: str
    l_syms_before: str
    l_syms_after: str
    is_lower: bool
    change: Optional[ChangeEvent]
def _sort_by(unifying_chars: AbstractSet[str], context: Context, new_text: str) -> str:
    """
    Build the ranking key for a path candidate: the matching text before the
    cursor, with any trailing path separator re-attached.
    """
    stem, trailing = new_text, ""
    if new_text.endswith(sep) or (altsep and new_text.endswith(altsep)):
        stem, trailing = new_text[:-1], new_text[-1]
    prefix = cword_before(unifying_chars, lower=False, context=context, sort_by=stem)
    return f"{prefix}{trailing}"
154,808 | from asyncio import Condition, as_completed, sleep
from dataclasses import dataclass, field
from enum import Enum, auto
from typing import (
AbstractSet,
AsyncIterator,
Iterator,
MutableMapping,
MutableSequence,
Optional,
Tuple,
)
from pynvim_pp.logging import suppress_and_log
from std2 import anext
from std2.itertools import batched
from ...lsp.requests.completion import comp_lsp
from ...lsp.types import LSPcomp
from ...shared.context import cword_before
from ...shared.executor import AsyncExecutor
from ...shared.fuzzy import multi_set_ratio
from ...shared.parse import lower
from ...shared.runtime import Supervisor
from ...shared.runtime import Worker as BaseWorker
from ...shared.settings import LSPClient, MatchOptions
from ...shared.sql import BIGGEST_INT
from ...shared.timeit import timeit
from ...shared.types import Completion, Context, Edit, SnippetEdit
from ..cache.worker import CacheWorker, sanitize_cached
def cword_before(
    unifying_chars: AbstractSet[str], lower: bool, context: Context, sort_by: str
) -> str:
    """
    Pick the slice of text before the cursor that `sort_by` should be
    matched against: whitespace, word chars, or symbol chars.
    """
    # Classify by the first character of the candidate's sort key.
    char = sort_by[:1]
    if char.isspace():
        return context.ws_before
    elif is_word(unifying_chars, chr=char):
        # `lower` selects the case-folded variant of the context slice.
        return context.l_words_before if lower else context.words_before
    else:
        return context.l_syms_before if lower else context.syms_before
def multi_set_ratio(lhs: str, rhs: str, look_ahead: int) -> float:
    """
    Multiset-intersection similarity of two strings, adjusted for length.

    Both sides are truncated to the shorter length plus `look_ahead` before
    comparing; an empty side scores 1.
    """
    shorter = min(len(lhs), len(rhs))
    if not shorter:
        return 1
    window = shorter + look_ahead
    left, right = lhs[:window], rhs[:window]
    longer = max(len(left), len(right))
    # Surplus characters of the longer side not covered by the shorter one.
    if len(left) > len(right):
        bigger, smaller = left, right
    else:
        bigger, smaller = right, left
    surplus = Counter(bigger) - Counter(smaller)
    score = 1 - sum(surplus.values()) / longer
    adjust = shorter / longer
    return score / adjust
def lower(text: str) -> str:
    """Aggressively lower-case via Unicode case folding (e.g. "ß" -> "ss")."""
    return text.casefold()
class MatchOptions:
    # Characters (besides alphanumerics) treated as part of a word.
    unifying_chars: AbstractSet[str]
    # Cap on the number of completions surfaced.
    max_results: int
    # Extra characters of slack allowed when fuzzy matching.
    look_ahead: int
    # Number of characters that must match exactly — TODO confirm semantics.
    exact_matches: int
    # Minimum fuzzy-match ratio for a candidate to be kept.
    fuzzy_cutoff: float
class Context:
    """
    Snapshot of editor state handed to completion workers.

    |... line ...|
    |... line_before 🐭 line_after ...|
    |... <syms_before><words_before>🐭<words_after><syms_after> ...|

    (🐭 marks the cursor position in the diagram above.)
    """

    # Presumably True when completion was invoked explicitly — TODO confirm.
    manual: bool
    # CHANGE ID <-> Triggered by NVIM, ie lines changes
    change_id: UUID
    # COMMIT ID <-> Triggered by COQ
    commit_id: UUID
    # --- buffer / file identity ---
    cwd: PurePath
    buf_id: int
    filetype: str
    filename: str
    line_count: int
    linefeed: Literal["\r\n", "\n", "\r"]
    tabstop: int
    expandtab: bool
    # (left, right) comment markers for the buffer's filetype.
    comment: Tuple[str, str]
    # --- cursor / window geometry ---
    position: NvimPos
    cursor: Cursors
    scr_col: int
    win_size: int
    # --- text surrounding the cursor (see diagram above) ---
    line: str
    line_before: str
    line_after: str
    lines: Sequence[str]
    lines_before: Sequence[str]
    lines_after: Sequence[str]
    words: str
    words_before: str
    words_after: str
    syms: str
    syms_before: str
    syms_after: str
    ws_before: str
    ws_after: str
    # l_* appear to be lower-cased variants of the above — TODO confirm.
    l_words_before: str
    l_words_after: str
    l_syms_before: str
    l_syms_after: str
    is_lower: bool
    change: Optional[ChangeEvent]
class Edit:
    # Replacement text inserted when the completion is accepted.
    new_text: str
class SnippetEdit(Edit):
    # Grammar used to interpret `new_text` as a snippet.
    grammar: SnippetGrammar
def _use_comp(match: MatchOptions, context: Context, sort_by: str, edit: Edit) -> bool:
    """
    Decide whether a completion item is still applicable to the current
    cursor context, via fuzzy-matching its sort key against the typed word.
    """
    cword = cword_before(
        match.unifying_chars,
        lower=True,
        context=context,
        sort_by=sort_by,
    )
    # Candidate must be long enough (within look-ahead slack) to cover cword.
    if len(sort_by) + match.look_ahead < len(cword):
        return False
    ratio = multi_set_ratio(
        cword,
        lower(sort_by),
        look_ahead=match.look_ahead,
    )
    if ratio < match.fuzzy_cutoff:
        return False
    # Snippets are always kept; plain edits are dropped once already typed.
    return isinstance(edit, SnippetEdit) or not cword.startswith(edit.new_text)
154,809 | from typing import AbstractSet, AsyncIterator, Mapping, MutableSet
from pynvim_pp.atomic import Atomic
from pynvim_pp.logging import suppress_and_log
from std2.string import removesuffix
from ...shared.executor import AsyncExecutor
from ...shared.runtime import Supervisor
from ...shared.runtime import Worker as BaseWorker
from ...shared.settings import RegistersClient
from ...shared.types import Completion, Context, Doc, Edit, SnippetEdit, SnippetGrammar
from .db.database import RDB
async def _registers(names: AbstractSet[str]) -> Mapping[str, str]:
    """Fetch register contents via one atomic batch of getreg() calls."""
    batch = Atomic()
    for name in names:
        batch.call_function("getreg", (name,))
    contents = await batch.commit(str)
    return dict(zip(names, contents))
154,810 | from contextlib import closing, suppress
from dataclasses import dataclass
from sqlite3 import Connection, Cursor, OperationalError
from typing import AbstractSet, Any, Iterator, Mapping
from ....consts import REGISTER_DB
from ....databases.types import DB
from ....shared.parse import coalesce, tokenize
from ....shared.settings import MatchOptions
from ....shared.sql import BIGGEST_INT, init_db, like_esc
from .sql import sql
sql = loader(Path(__file__).resolve(strict=True).parent)


def _init() -> Connection:
    """Open the registers DB, tune pragmas, and create the schema."""
    conn = Connection(REGISTER_DB, isolation_level=None)
    init_db(conn)
    for ddl in ("pragma", "tables"):
        conn.executescript(sql("create", ddl))
    return conn
154,811 | from asyncio import Lock, gather
from os import linesep
from pathlib import PurePath
from typing import AsyncIterator, Iterator, Mapping, Optional, Tuple
from pynvim_pp.atomic import Atomic
from pynvim_pp.buffer import Buffer
from pynvim_pp.logging import suppress_and_log
from pynvim_pp.rpc_types import NvimError
from ...paths.show import fmt_path
from ...shared.executor import AsyncExecutor
from ...shared.runtime import Supervisor
from ...shared.runtime import Worker as BaseWorker
from ...shared.settings import TSClient
from ...shared.types import Completion, Context, Doc, Edit
from ...treesitter.request import async_request
from ...treesitter.types import Payload
from .db.database import TDB
async def _bufs() -> Mapping[int, int]:
    """Map listed buffer numbers to their line counts; {} when the RPC fails."""
    try:
        listed = await Buffer.list(listed=True)
        batch = Atomic()
        for b in listed:
            batch.buf_line_count(b)
        linecounts = await batch.commit(int)
    except NvimError:
        return {}
    return {int(b.number): count for b, count in zip(listed, linecounts)}
154,812 | from asyncio import Lock, gather
from os import linesep
from pathlib import PurePath
from typing import AsyncIterator, Iterator, Mapping, Optional, Tuple
from pynvim_pp.atomic import Atomic
from pynvim_pp.buffer import Buffer
from pynvim_pp.logging import suppress_and_log
from pynvim_pp.rpc_types import NvimError
from ...paths.show import fmt_path
from ...shared.executor import AsyncExecutor
from ...shared.runtime import Supervisor
from ...shared.runtime import Worker as BaseWorker
from ...shared.settings import TSClient
from ...shared.types import Completion, Context, Doc, Edit
from ...treesitter.request import async_request
from ...treesitter.types import Payload
from .db.database import TDB
def _doc(client: TSClient, context: Context, payload: Payload) -> Optional[Doc]:
    """
    Render a preview Doc for a treesitter node: a `<path>:<range>` header,
    then the grandparent and parent node kinds/texts, each wrapped in the
    buffer's comment markers.

    NOTE(review): annotated Optional but every path here returns a Doc.
    """

    def cont() -> Iterator[str]:
        clhs, crhs = context.comment
        path = PurePath(context.filename)
        pos = fmt_path(
            context.cwd, path=PurePath(payload.filename), is_dir=False, current=path
        )
        # `<path>:<lo>[-<hi>]` header.
        yield clhs
        yield pos
        yield ":"
        lo, hi = payload.range
        yield str(lo)
        if hi != lo:
            yield "-"
            yield str(hi)
        yield client.path_sep
        yield crhs
        yield linesep
        if payload.grandparent:
            yield clhs
            yield payload.grandparent.kind
            yield linesep
            yield payload.grandparent.text
            yield crhs
        # Separator row between grandparent and parent sections.
        if payload.grandparent and payload.parent:
            yield linesep
            yield clhs
            yield client.path_sep
            yield crhs
            yield linesep
        if payload.parent:
            yield clhs
            yield payload.parent.kind
            yield linesep
            yield payload.parent.text
            yield crhs

    doc = Doc(syntax=context.filetype, text="".join(cont()))
    return doc
class TSClient(BaseClient, _AlwaysTop):
    # Separator used when joining doc segments (see `_doc`).
    path_sep: str
    # Threshold above which treesitter requests count as slow — TODO confirm units.
    slow_threshold: float
class Context:
    """
    Snapshot of editor state handed to completion workers.

    |... line ...|
    |... line_before 🐭 line_after ...|
    |... <syms_before><words_before>🐭<words_after><syms_after> ...|

    (🐭 marks the cursor position in the diagram above.)
    """

    # Presumably True when completion was invoked explicitly — TODO confirm.
    manual: bool
    # CHANGE ID <-> Triggered by NVIM, ie lines changes
    change_id: UUID
    # COMMIT ID <-> Triggered by COQ
    commit_id: UUID
    # --- buffer / file identity ---
    cwd: PurePath
    buf_id: int
    filetype: str
    filename: str
    line_count: int
    linefeed: Literal["\r\n", "\n", "\r"]
    tabstop: int
    expandtab: bool
    # (left, right) comment markers for the buffer's filetype.
    comment: Tuple[str, str]
    # --- cursor / window geometry ---
    position: NvimPos
    cursor: Cursors
    scr_col: int
    win_size: int
    # --- text surrounding the cursor (see diagram above) ---
    line: str
    line_before: str
    line_after: str
    lines: Sequence[str]
    lines_before: Sequence[str]
    lines_after: Sequence[str]
    words: str
    words_before: str
    words_after: str
    syms: str
    syms_before: str
    syms_after: str
    ws_before: str
    ws_after: str
    # l_* appear to be lower-cased variants of the above — TODO confirm.
    l_words_before: str
    l_words_after: str
    l_syms_before: str
    l_syms_after: str
    is_lower: bool
    change: Optional[ChangeEvent]
class Edit:
    # Replacement text inserted when the completion is accepted.
    new_text: str
class Completion:
    # Short name of the source that produced this item.
    source: str
    always_on_top: bool
    # Ranking bias for this source — TODO confirm exact semantics.
    weight_adjust: float
    # Text shown in the completion menu.
    label: str
    # Key used for ranking/filtering against the typed prefix.
    sort_by: str
    # The edit applied when the item is accepted.
    primary_edit: Edit
    adjust_indent: bool
    # Key used to select a menu icon (typically the kind).
    icon_match: Optional[str]
    uid: UUID = field(default_factory=uuid4)
    # Additional edits applied alongside the primary one.
    secondary_edits: Sequence[RangeEdit] = ()
    preselect: bool = False
    kind: str = ""
    doc: Optional[Doc] = None
    # Source-specific payload (LSP / LUA / path), when any.
    extern: Union[ExternLSP, ExternLUA, ExternPath, None] = None
class Payload(SimplePayload):
    # File the node came from.
    filename: str
    # (lo, hi) line range of the node; equal when it spans one line.
    range: Tuple[int, int]
    # Enclosing syntax nodes, when known.
    parent: Optional[SimplePayload]
    grandparent: Optional[SimplePayload]
def _trans(client: TSClient, context: Context, payload: Payload) -> Completion:
    """Convert a treesitter payload into a Completion item."""
    new_text = payload.text
    primary = Edit(new_text=new_text)
    # Icon key is the leading component of a dotted kind.
    icon, _, _ = payload.kind.partition(".")
    return Completion(
        source=client.short_name,
        always_on_top=client.always_on_top,
        weight_adjust=client.weight_adjust,
        label=new_text,
        sort_by=new_text,
        primary_edit=primary,
        adjust_indent=False,
        kind=payload.kind,
        doc=_doc(client, context=context, payload=payload),
        icon_match=icon,
    )
154,813 | from contextlib import closing, suppress
from sqlite3 import Connection, Cursor, OperationalError
from typing import Iterable, Iterator, Mapping
from ....consts import TREESITTER_DB
from ....databases.types import DB
from ....shared.settings import MatchOptions
from ....shared.sql import BIGGEST_INT, init_db, like_esc
from ....treesitter.types import Payload, SimplePayload
from .sql import sql
sql = loader(Path(__file__).resolve(strict=True).parent)


def _init() -> Connection:
    """Open the treesitter DB, tune pragmas, and create the schema."""
    conn = Connection(TREESITTER_DB, isolation_level=None)
    init_db(conn)
    for ddl in ("pragma", "tables"):
        conn.executescript(sql("create", ddl))
    return conn
154,814 | from contextlib import closing, suppress
from sqlite3 import Connection, Cursor, OperationalError
from typing import Iterable, Iterator, Mapping
from ....consts import TREESITTER_DB
from ....databases.types import DB
from ....shared.settings import MatchOptions
from ....shared.sql import BIGGEST_INT, init_db, like_esc
from ....treesitter.types import Payload, SimplePayload
from .sql import sql
sql = loader(Path(__file__).resolve(strict=True).parent)


def _ensure_buffer(cursor: Cursor, buf_id: int, filetype: str, filename: str) -> None:
    """Insert the buffer row if absent, otherwise refresh its metadata."""
    cursor.execute(sql("select", "buffer_by_id"), {"rowid": buf_id})
    exists = cursor.fetchone()
    row = {"rowid": buf_id, "filetype": filetype, "filename": filename}
    verb = "update" if exists else "insert"
    cursor.execute(sql(verb, "buffer"), row)
154,815 | from dataclasses import dataclass
from os import linesep
from pathlib import PurePath
from typing import AsyncIterator, Iterator, Mapping, Optional, Sequence, Tuple
from pynvim_pp.buffer import Buffer
from pynvim_pp.logging import suppress_and_log
from pynvim_pp.rpc_types import NvimError
from pynvim_pp.window import Window
from ...paths.show import fmt_path
from ...shared.executor import AsyncExecutor
from ...shared.runtime import Supervisor
from ...shared.runtime import Worker as BaseWorker
from ...shared.settings import BuffersClient
from ...shared.types import Completion, Context, Doc, Edit
from .db.database import BDB, BufferWord, Update
class _Info:
    # Current buffer's id / filetype / file name.
    buf_id: int
    filetype: str
    filename: str
    # (lo, hi) window of lines sampled around the cursor.
    range: Tuple[int, int]
    # The lines inside `range`.
    lines: Sequence[str]
    # Line counts for every listed buffer.
    buffers: Mapping[Buffer, int]
async def _info() -> Optional[_Info]:
    """
    Snapshot the active window's buffer: the lines visible around the
    cursor plus line counts of all listed buffers. None on any nvim error
    or when the current buffer is not listed.
    """
    try:
        win = await Window.get_current()
        win_height = await win.get_height()
        cur_buf = await win.get_buf()
        listed = await Buffer.list(listed=True)
        counts = {b: await b.line_count() for b in listed}
        total = counts.get(cur_buf)
        if total is None:
            return None
        row, _ = await win.get_cursor()
        # One window-height of lines above and below the cursor, clamped.
        lo = max(0, row - win_height)
        hi = min(total, row + win_height + 1)
        visible = await cur_buf.get_lines(lo=lo, hi=hi)
        ft = await cur_buf.filetype()
        name = (await cur_buf.get_name()) or ""
        return _Info(
            buf_id=cur_buf.number,
            filetype=ft,
            filename=name,
            range=(lo, hi),
            lines=visible,
            buffers=counts,
        )
    except NvimError:
        return None
154,816 | from dataclasses import dataclass
from os import linesep
from pathlib import PurePath
from typing import AsyncIterator, Iterator, Mapping, Optional, Sequence, Tuple
from pynvim_pp.buffer import Buffer
from pynvim_pp.logging import suppress_and_log
from pynvim_pp.rpc_types import NvimError
from pynvim_pp.window import Window
from ...paths.show import fmt_path
from ...shared.executor import AsyncExecutor
from ...shared.runtime import Supervisor
from ...shared.runtime import Worker as BaseWorker
from ...shared.settings import BuffersClient
from ...shared.types import Completion, Context, Doc, Edit
from .db.database import BDB, BufferWord, Update
def fmt_path(
    cwd: PurePath, path: PurePath, is_dir: bool, current: Optional[PurePath] = None
) -> str:
    """
    Render `path` compactly: "." when it equals `current`, else relative to
    `cwd`, else relative to the home dir, else as-is. Directories get a
    trailing separator.
    """
    if path == current:
        return curdir
    posfix = sep if is_dir else ""
    with suppress(ValueError):
        # relative_to raises ValueError when `path` lies outside `cwd`.
        rel = path.relative_to(cwd)
        return f"{curdir}{sep}{normcase(rel)}{posfix}"
    with suppress(ValueError):
        rel = path.relative_to(_HOME)
        return f"~{sep}{normcase(rel)}{posfix}"
    return f"{normcase(path)}{posfix}"
class BuffersClient(_WordbankClient, _AlwaysTop):
    # Restrict matches to buffers sharing the current filetype (see `_doc`).
    same_filetype: bool
    # Suffix appended after the filetype in rendered docs.
    parent_scope: str
class Context:
    """
    Snapshot of editor state handed to completion workers.

    |... line ...|
    |... line_before 🐭 line_after ...|
    |... <syms_before><words_before>🐭<words_after><syms_after> ...|

    (🐭 marks the cursor position in the diagram above.)
    """

    # Presumably True when completion was invoked explicitly — TODO confirm.
    manual: bool
    # CHANGE ID <-> Triggered by NVIM, ie lines changes
    change_id: UUID
    # COMMIT ID <-> Triggered by COQ
    commit_id: UUID
    # --- buffer / file identity ---
    cwd: PurePath
    buf_id: int
    filetype: str
    filename: str
    line_count: int
    linefeed: Literal["\r\n", "\n", "\r"]
    tabstop: int
    expandtab: bool
    # (left, right) comment markers for the buffer's filetype.
    comment: Tuple[str, str]
    # --- cursor / window geometry ---
    position: NvimPos
    cursor: Cursors
    scr_col: int
    win_size: int
    # --- text surrounding the cursor (see diagram above) ---
    line: str
    line_before: str
    line_after: str
    lines: Sequence[str]
    lines_before: Sequence[str]
    lines_after: Sequence[str]
    words: str
    words_before: str
    words_after: str
    syms: str
    syms_before: str
    syms_after: str
    ws_before: str
    ws_after: str
    # l_* appear to be lower-cased variants of the above — TODO confirm.
    l_words_before: str
    l_words_after: str
    l_syms_before: str
    l_syms_after: str
    is_lower: bool
    change: Optional[ChangeEvent]
class Doc:
    # Raw documentation text to display in the preview.
    text: str
    # Filetype/syntax used to highlight `text` (may be "").
    syntax: str
class BufferWord:
    # A word harvested from a buffer, plus where it was seen.
    text: str
    filetype: str
    filename: str
    line_num: int
def _doc(client: BuffersClient, context: Context, word: BufferWord) -> Doc:
    """Location (and, cross-filetype, origin filetype) blurb for a buffer word."""
    rows = []
    if word.filetype and not client.same_filetype:
        rows.append(f"{word.filetype}{client.parent_scope}")
    pos = fmt_path(
        context.cwd,
        path=PurePath(word.filename),
        is_dir=False,
        current=PurePath(context.filename),
    )
    rows.append(f"{pos}:{word.line_num}")
    return Doc(text=linesep.join(rows), syntax="")
154,817 | from contextlib import closing, suppress
from dataclasses import dataclass
from itertools import islice
from random import shuffle
from sqlite3 import Connection, OperationalError
from sqlite3.dbapi2 import Cursor
from typing import AbstractSet, Iterator, Mapping, Optional, Sequence, Tuple
from uuid import uuid4
from pynvim_pp.lib import recode
from ....consts import BUFFER_DB, DEBUG
from ....databases.types import DB
from ....shared.parse import coalesce
from ....shared.settings import MatchOptions
from ....shared.sql import BIGGEST_INT, init_db, like_esc
from .sql import sql
def _ensure_buffer(cursor: Cursor, buf_id: int, filetype: str, filename: str) -> None:
    """Insert the buffer row if absent, otherwise refresh its metadata."""
    cursor.execute(sql("select", "buffer_by_id"), {"rowid": buf_id})
    row = {
        "rowid": buf_id,
        "filetype": filetype,
        "filename": filename,
    }
    # fetchone() is truthy iff the buffer row already exists.
    if cursor.fetchone():
        cursor.execute(sql("update", "buffer"), row)
    else:
        cursor.execute(sql("insert", "buffer"), row)
sql = loader(Path(__file__).resolve(strict=True).parent)


def _setlines(
    cursor: Cursor,
    unifying_chars: AbstractSet[str],
    tokenization_limit: int,
    include_syms: bool,
    buf_id: int,
    filetype: str,
    filename: str,
    lo: int,
    hi: int,
    lines: Sequence[str],
) -> None:
    """
    Replace lines [lo, hi) of a buffer in the DB and re-tokenize their words.

    Deletes the old line rows, shifts trailing line numbers by the size
    delta, inserts the new lines (full text stored only when DEBUG) and
    their words (capped at `tokenization_limit`), and guarantees at least
    one line row exists per buffer.
    """

    # (line_num, sanitized text, random rowid) for each incoming line.
    def m0() -> Iterator[Tuple[int, str, bytes]]:
        for line_num, line in enumerate(lines, start=lo):
            line_id = uuid4().bytes
            yield line_num, recode(line), line_id

    line_info = [*m0()]
    # NOTE(review): insertion order is randomized — presumably to spread
    # index inserts; confirm intent.
    shuffle(line_info)

    # Row parameters for the `line` table.
    def m1() -> Iterator[Mapping]:
        for line_num, line, line_id in line_info:
            yield {
                "rowid": line_id,
                "buffer_id": buf_id,
                "line_num": line_num,
                "line": line if DEBUG else "",
            }

    # Row parameters for the `word` table: one row per token per line.
    def m2() -> Iterator[Mapping]:
        for line_num, line, line_id in line_info:
            for word in coalesce(
                unifying_chars,
                include_syms=include_syms,
                backwards=None,
                chars=line,
            ):
                yield {"line_id": line_id, "word": word, "line_num": line_num}

    _ensure_buffer(
        cursor,
        buf_id=buf_id,
        filetype=filetype,
        filename=filename,
    )
    cursor.execute(
        sql("delete", "lines"),
        {"buffer_id": buf_id, "lo": lo, "hi": hi},
    )
    # Net change in line count; renumber the lines after the edited window.
    shift = len(lines) - (hi - lo)
    cursor.execute(
        sql("update", "lines_shift_1"),
        {"buffer_id": buf_id, "lo": lo, "shift": shift},
    )
    cursor.execute(sql("update", "lines_shift_2"), {"buffer_id": buf_id})
    # Un-encodable text is dropped rather than failing the whole update.
    with suppress(UnicodeEncodeError):
        cursor.executemany(sql("insert", "line"), m1())
    with suppress(UnicodeEncodeError):
        cursor.executemany(sql("insert", "word"), islice(m2(), tokenization_limit))
    cursor.execute(sql("select", "line_count"), {"buffer_id": buf_id})
    count = cursor.fetchone()["line_count"]
    # Keep an empty sentinel row so the buffer always has at least one line.
    if not count:
        cursor.execute(
            sql("insert", "line"),
            {"rowid": uuid4().bytes, "line": "", "buffer_id": buf_id, "line_num": 0},
        )
154,818 | from contextlib import closing, suppress
from dataclasses import dataclass
from itertools import islice
from random import shuffle
from sqlite3 import Connection, OperationalError
from sqlite3.dbapi2 import Cursor
from typing import AbstractSet, Iterator, Mapping, Optional, Sequence, Tuple
from uuid import uuid4
from pynvim_pp.lib import recode
from ....consts import BUFFER_DB, DEBUG
from ....databases.types import DB
from ....shared.parse import coalesce
from ....shared.settings import MatchOptions
from ....shared.sql import BIGGEST_INT, init_db, like_esc
from .sql import sql
# Load the SQL statement templates that live next to this module.
# NOTE(review): `loader` and `Path` are not imported in this chunk — presumably
# provided elsewhere in the file; verify.
sql = loader(Path(__file__).resolve(strict=True).parent)
def _init() -> Connection:
    """Open the buffer database, apply pragmas/schema, and return the connection."""
    # isolation_level=None puts sqlite3 in autocommit mode.
    conn = Connection(BUFFER_DB, isolation_level=None)
    init_db(conn)
    for script in ("pragma", "tables"):
        conn.executescript(sql("create", script))
    return conn
154,819 | from dataclasses import replace
from std2.types import never
from ..shared.settings import IconMode, Icons
from ..shared.types import Completion
class IconMode(Enum):
    """How kind icons are rendered in the completion menu (see `iconify`)."""

    none = auto()  # icons disabled
    short = auto()  # icon replaces the textual kind
    long = auto()  # icon is prefixed to the textual kind
class Icons:
    # Icon rendering configuration consumed by `iconify`.
    mode: IconMode
    spacing: int  # spaces between icon and kind text (long mode)
    aliases: Mapping[str, str]  # icon_match -> canonical icon key
    mappings: Mapping[str, str]  # canonical key -> icon glyph
class Completion:
    # A candidate produced by a completion source.
    source: str
    always_on_top: bool
    weight_adjust: float
    label: str
    sort_by: str
    primary_edit: Edit
    adjust_indent: bool
    icon_match: Optional[str]  # key used by `iconify` to look up a kind icon
    uid: UUID = field(default_factory=uuid4)
    secondary_edits: Sequence[RangeEdit] = ()
    preselect: bool = False
    kind: str = ""  # decorated by `iconify` according to IconMode
    doc: Optional[Doc] = None
    extern: Union[ExternLSP, ExternLUA, ExternPath, None] = None
def iconify(icons: Icons, completion: Completion) -> Completion:
    """Return ``completion`` with its ``kind`` decorated per the icon settings."""
    # Guard clauses: anything un-matchable passes through untouched.
    if not completion.icon_match:
        return completion
    alias = icons.aliases.get(completion.icon_match) or completion.icon_match
    icon = icons.mappings.get(alias)
    if not icon:
        return completion
    if icons.mode is IconMode.none:
        return completion
    if icons.mode is IconMode.short:
        # Icon stands in for the kind text.
        return replace(completion, kind=icon + (icons.spacing - 1) * " ")
    if icons.mode is IconMode.long:
        pad = max(1, icons.spacing) * " "
        decorated = (
            f"{icon}{pad}{completion.kind}"
            if completion.kind
            else icon + (icons.spacing - 1) * " "
        )
        return replace(completion, kind=decorated)
    never(icons.mode)
154,820 | from argparse import Namespace
from contextlib import nullcontext, suppress
from locale import strxfrm
from os.path import normcase
from pathlib import PurePath
from typing import Any, Iterator, Mapping, Sequence
from pynvim_pp.buffer import Buffer
from pynvim_pp.hold import hold_win
from pynvim_pp.lib import display_width
from pynvim_pp.logging import log
from pynvim_pp.nvim import Nvim
from pynvim_pp.operators import operator_marks
from pynvim_pp.preview import set_preview
from pynvim_pp.rpc_types import NvimError
from pynvim_pp.types import NoneType
from pynvim_pp.window import Window
from std2.argparse import ArgparseError, ArgParser
from std2.locale import pathsort_key
from yaml import SafeDumper, add_representer, safe_dump_all
from yaml.nodes import ScalarNode, SequenceNode
from ...clients.snippet.worker import Worker as SnipWorker
from ...consts import REPL_GRAMMAR
from ...lang import LANG
from ...paths.show import fmt_path
from ...registry import rpc
from ...shared.types import SnippetGrammar
from ...snippets.parsers.lexer import decode_mark_idx
from ...snippets.parsers.types import ParseError, ParseInfo
from ...snippets.types import LoadError
from ..rt_types import Stack
from .snippets import (
Compiled,
compile_one,
compile_user_snippets,
slurp_compiled,
snippet_paths,
user_mtimes,
)
_WIDTH = 80  # max display width before a scalar is folded (YAML ">" style)
_TAB = 2  # tab size used when measuring display width
def _repr_str(dumper: SafeDumper, data: str) -> ScalarNode:
if len(data.splitlines()) > 1:
style = "|"
elif display_width(data, tabsize=_TAB) > _WIDTH:
style = ">"
else:
style = ""
node = dumper.represent_scalar("tag:yaml.org,2002:str", data, style=style)
return node | null |
154,821 | from argparse import Namespace
from contextlib import nullcontext, suppress
from locale import strxfrm
from os.path import normcase
from pathlib import PurePath
from typing import Any, Iterator, Mapping, Sequence
from pynvim_pp.buffer import Buffer
from pynvim_pp.hold import hold_win
from pynvim_pp.lib import display_width
from pynvim_pp.logging import log
from pynvim_pp.nvim import Nvim
from pynvim_pp.operators import operator_marks
from pynvim_pp.preview import set_preview
from pynvim_pp.rpc_types import NvimError
from pynvim_pp.types import NoneType
from pynvim_pp.window import Window
from std2.argparse import ArgparseError, ArgParser
from std2.locale import pathsort_key
from yaml import SafeDumper, add_representer, safe_dump_all
from yaml.nodes import ScalarNode, SequenceNode
from ...clients.snippet.worker import Worker as SnipWorker
from ...consts import REPL_GRAMMAR
from ...lang import LANG
from ...paths.show import fmt_path
from ...registry import rpc
from ...shared.types import SnippetGrammar
from ...snippets.parsers.lexer import decode_mark_idx
from ...snippets.parsers.types import ParseError, ParseInfo
from ...snippets.types import LoadError
from ..rt_types import Stack
from .snippets import (
Compiled,
compile_one,
compile_user_snippets,
slurp_compiled,
snippet_paths,
user_mtimes,
)
def _repr_seq(dumper: SafeDumper, data: Sequence[Any]) -> SequenceNode:
node = dumper.represent_sequence("tag:yaml.org,2002:seq", data, flow_style=True)
return node | null |
154,822 | from argparse import Namespace
from contextlib import nullcontext, suppress
from locale import strxfrm
from os.path import normcase
from pathlib import PurePath
from typing import Any, Iterator, Mapping, Sequence
from pynvim_pp.buffer import Buffer
from pynvim_pp.hold import hold_win
from pynvim_pp.lib import display_width
from pynvim_pp.logging import log
from pynvim_pp.nvim import Nvim
from pynvim_pp.operators import operator_marks
from pynvim_pp.preview import set_preview
from pynvim_pp.rpc_types import NvimError
from pynvim_pp.types import NoneType
from pynvim_pp.window import Window
from std2.argparse import ArgparseError, ArgParser
from std2.locale import pathsort_key
from yaml import SafeDumper, add_representer, safe_dump_all
from yaml.nodes import ScalarNode, SequenceNode
from ...clients.snippet.worker import Worker as SnipWorker
from ...consts import REPL_GRAMMAR
from ...lang import LANG
from ...paths.show import fmt_path
from ...registry import rpc
from ...shared.types import SnippetGrammar
from ...snippets.parsers.lexer import decode_mark_idx
from ...snippets.parsers.types import ParseError, ParseInfo
from ...snippets.types import LoadError
from ..rt_types import Stack
from .snippets import (
Compiled,
compile_one,
compile_user_snippets,
slurp_compiled,
snippet_paths,
user_mtimes,
)
def _parse_args(args: Sequence[str], filetype: str) -> Namespace:
    """Parse the snippet sub-command line: ls / cd / compile / edit [filetype]."""
    parser = ArgParser()
    sub = parser.add_subparsers(dest="action", required=True)
    for action in ("ls", "cd", "compile"):
        sub.add_parser(action)
    edit_parser = sub.add_parser("edit")
    # Default the target filetype to the current buffer's filetype.
    edit_parser.add_argument("filetype", nargs="?", default=filetype)
    return parser.parse_args(args)
# Message-catalog singleton; this stub starts empty and is presumably populated
# with real locale data elsewhere — NOTE(review): `_Lang` is not defined in this
# chunk, verify it is in scope.
LANG = _Lang({})
def fmt_path(
    cwd: PurePath, path: PurePath, is_dir: bool, current: Optional[PurePath] = None
) -> str:
    """Render ``path`` compactly.

    ``.`` for the current file, ``./…`` when under ``cwd``, ``~/…`` when under
    the home directory, else the absolute path; directories get a trailing sep.
    """
    if path == current:
        return curdir
    tail = sep if is_dir else ""
    with suppress(ValueError):
        return f"{curdir}{sep}{normcase(path.relative_to(cwd))}{tail}"
    with suppress(ValueError):
        return f"~{sep}{normcase(path.relative_to(_HOME))}{tail}"
    return f"{normcase(path)}{tail}"
class ParseError(Exception):
    """Raised when snippet parsing fails."""
class LoadError(Exception):
    """Raised when snippet loading fails."""
class Stack:
    # Runtime bundle threaded through every RPC handler.
    settings: Settings
    lru: MutableMapping[UUID, Completion]  # recently resolved completions
    metrics: MutableMapping[UUID, Metric]  # per-completion scoring metadata
    idb: IDB  # insertions database
    supervisor: Supervisor
    workers: AbstractSet[Worker]  # active completion sources
async def snippet_paths(user_path: Optional[Path]) -> Sequence[Path]:
    """Collect candidate snippet directories.

    The user's configured directory (when resolvable) plus a
    ``coq-user-snippets`` folder under every nvim runtime path.
    """

    async def gen() -> AsyncIterator[Path]:
        if user_path:
            conf_dir = Path(await Nvim.fn.stdpath(str, "config"))
            if found := _resolve(conf_dir, path=user_path):
                yield found
        for rt_path in await Nvim.list_runtime_paths():
            yield rt_path / "coq-user-snippets"

    return [p async for p in gen()]
async def user_mtimes(
    user_path: Optional[Path],
) -> Tuple[Sequence[Path], Mapping[Path, float]]:
    """Return the snippet search paths and the mtime of every ``*.snip`` file under them."""
    paths = await snippet_paths(user_path=user_path)

    def scan() -> Iterator[Tuple[Path, float]]:
        for root in paths:
            # Missing or unreadable directories are silently skipped.
            with suppress(OSError):
                for child in walk(root):
                    if child.suffix in {".snip"}:
                        yield child, child.stat().st_mtime

    mtimes = dict(await to_thread(lambda: tuple(scan())))
    return paths, mtimes
async def slurp_compiled(
    stack: Stack, warn: AbstractSet[SnippetWarnings], silent: bool
) -> Mapping[Path, float]:
    """Load compiled snippet artefacts into the snippet worker's DB.

    Returns the user snippet sources that are newer than their compiled
    artefacts, i.e. what still needs compiling.
    """
    # Locate the snippet worker; with none enabled there is nothing to load.
    for worker in stack.workers:
        if isinstance(worker, SnipWorker):
            break
    else:
        return {}
    with timeit("LOAD SNIPS"):
        (
            cwd,
            bundled,
            (user_compiled, user_compiled_mtimes),
            (_, user_snips_mtimes),
            db_mtimes,
        ) = await gather(
            Nvim.getcwd(),
            _bundled_mtimes(),
            _load_user_compiled(stack.supervisor.vars_dir),
            user_mtimes(user_path=stack.settings.clients.snippets.user_path),
            worker.db_mtimes(),
        )
        # Evict DB entries whose backing artefact no longer exists.
        if stale := db_mtimes.keys() - (bundled.keys() | user_compiled.keys()):
            await worker.clean(stale)
        # (Re)load every artefact newer than what the DB has recorded.
        if needs_loading := {
            path: mtime
            for path, mtime in chain(bundled.items(), user_compiled.items())
            if mtime > db_mtimes.get(path, -inf)
        }:
            await _rolling_load(worker, cwd=cwd, compiled=needs_loading, silent=silent)
        needs_compilation = {
            path: mtime
            for path, mtime in user_snips_mtimes.items()
            if mtime > user_compiled_mtimes.get(path, -inf)
        }
        if SnippetWarnings.missing in warn and not (bundled or user_compiled):
            await Nvim.write(LANG("fs snip load empty"))
        return needs_compilation
async def compile_user_snippets(stack: Stack) -> None:
    """Compile the user's ``*.snip`` files and persist the compiled artefact."""
    with timeit("COMPILE SNIPS"):
        info = ParseInfo(visual="", clipboard="", comment_str=("", ""))
        _, mtimes = await user_mtimes(
            user_path=stack.settings.clients.snippets.user_path
        )
        # Parsing is CPU bound; run it off the event loop.
        loaded = await to_thread(
            lambda: load_direct(
                lambda x: x,
                ignore_error=False,
                lsp=(),
                neosnippet=mtimes,
                ultisnip=(),
                neosnippet_grammar=SnippetGrammar.lsp,
            )
        )
        # Force a full translation now so parse errors surface here, not at use time.
        _ = tuple(
            _trans(
                stack.settings.match,
                comp=stack.settings.completion,
                info=info,
                snips=loaded.snippets.values(),
            )
        )
        try:
            await _dump_compiled(
                stack.supervisor.vars_dir, mtimes=mtimes, loaded=loaded
            )
        except OSError as e:
            await Nvim.write(e)
async def snips(stack: Stack, args: Sequence[str]) -> None:
    """Entry point for the snippet sub-commands: ls / cd / compile / edit."""
    buf = await Buffer.get_current()
    ft = await buf.filetype()
    try:
        ns = _parse_args(args, filetype=ft or "*")
    except ArgparseError as e:
        await Nvim.write(e, error=True)
    else:
        if ns.action == "ls":
            # List the user's snippet files in the preview window.
            cwd = await Nvim.getcwd()
            _, mtimes = await user_mtimes(
                user_path=stack.settings.clients.snippets.user_path
            )
            preview: Sequence[str] = tuple(
                fmt_path(cwd, path=path, is_dir=False)
                for path in sorted(mtimes, key=pathsort_key)
            )
            if mtimes:
                await set_preview(syntax="", preview=preview)
            else:
                await Nvim.write(LANG("no snippets found"))
        elif ns.action == "cd":
            # Jump to the primary user snippet directory, creating it if needed.
            paths = await snippet_paths(
                user_path=stack.settings.clients.snippets.user_path
            )
            if paths:
                path, *_ = paths
                path.mkdir(parents=True, exist_ok=True)
                await Nvim.chdir(path, history=True)
            else:
                assert False
        elif ns.action == "compile":
            # Recompile + reload via the snippet worker, if one is enabled.
            for worker in stack.workers:
                if isinstance(worker, SnipWorker):
                    await Nvim.write(LANG("waiting..."))
                    try:
                        await compile_user_snippets(stack)
                        await slurp_compiled(stack, warn=frozenset(), silent=False)
                    except (LoadError, ParseError) as e:
                        # Show the parse error itself in the preview window.
                        preview = str(e).splitlines()
                        await set_preview(syntax="", preview=preview)
                        await Nvim.write(LANG("snip parse fail"))
                    else:
                        await Nvim.write(LANG("snip parse succ"))
                    break
            else:
                await Nvim.write(LANG("snip source not enabled"))
        elif ns.action == "edit":
            # Open (or create) the snippet file for the requested filetype.
            paths, mtimes = await user_mtimes(
                user_path=stack.settings.clients.snippets.user_path
            )
            path, *_ = paths
            exts = {path.stem: path for path in mtimes}
            snip_path = exts.get(ns.filetype, path / f"{ns.filetype}.snip")
            snip_path.parent.mkdir(parents=True, exist_ok=True)
            escaped = await Nvim.fn.fnameescape(str, normcase(snip_path))
            await Nvim.api.feedkeys(NoneType, f":edit {escaped}", "n", False)
        else:
            assert False
154,823 | from asyncio import create_task, gather, sleep, wait
from dataclasses import replace
from time import monotonic
from typing import AbstractSet, Any, Literal, Mapping, Optional, Sequence, Union
from uuid import UUID, uuid4
from pynvim_pp.buffer import Buffer, ExtMark, ExtMarker
from pynvim_pp.lib import encode
from pynvim_pp.logging import log, suppress_and_log
from pynvim_pp.nvim import Nvim
from std2.asyncio import cancel
from std2.locale import si_prefixed_smol
from std2.pickle.decoder import new_decoder
from std2.pickle.types import DecodeError
from ...consts import DEBUG
from ...lsp.requests.command import cmd
from ...lsp.requests.resolve import resolve
from ...registry import NAMESPACE, autocmd, rpc
from ...shared.runtime import Metric
from ...shared.types import ChangeEvent, Context, ExternLSP, ExternPath
from ..completions import complete
from ..context import context
from ..edit import NS, edit
from ..rt_types import Stack
from ..state import State, state
from ..trans import trans
# Decoder for the UUID carried in vim's completed-item ``user_data`` field.
_UDECODER = new_decoder[UUID](UUID)
async def _resolve(stack: Stack, metric: Metric) -> Metric:
    """Fill in a completion's secondary edits via LSP ``completionItem/resolve``.

    Serves from the LRU cache when possible; otherwise asks the server,
    bounded by the configured resolve timeout.  Returns the original metric
    unchanged when resolution is inapplicable or fails.
    """
    if not isinstance((extern := metric.comp.extern), ExternLSP):
        # Only LSP-backed completions can be resolved.
        return metric
    else:
        if comp := stack.lru.get(metric.comp.uid):
            return replace(
                metric,
                comp=replace(metric.comp, secondary_edits=comp.secondary_edits),
            )
        else:
            done, not_done = await wait(
                (create_task(resolve(extern=extern)),),
                timeout=stack.settings.clients.lsp.resolve_timeout,
            )
            # Cancel a timed-out request instead of leaving it dangling.
            await cancel(*not_done)
            comp = (await done.pop()) if done else None
            if not comp:
                return metric
            else:
                return replace(
                    metric,
                    comp=replace(metric.comp, secondary_edits=comp.secondary_edits),
                )
# Hook: invoke `_comp_done` with vim.v.completed_item after every completion.
_ = (
    autocmd("CompleteDone")
    << f"lua {NAMESPACE}.{_comp_done.method}(vim.v.completed_item)"
)
async def cmd(extern: ExternLSP) -> None:
    """Execute a completion item's attached LSP command, if it has one."""
    if extern.command:
        # Third-party (lua) sources are dispatched via a different RPC method.
        name = "lsp_third_party_cmd" if isinstance(extern, ExternLUA) else "lsp_command"
        command = _ENCODER(extern.command)
        clients = {extern.client} if extern.client else set()
        # Drain all responses; the command itself is fire-and-forget.
        async for _ in async_request(name, None, clients, command):
            pass
class ExternLSP:
    # Provenance of an LSP-sourced completion item.
    client: Optional[str]  # originating LSP client name, if known
    item: Mapping  # raw CompletionItem payload
    command: Optional[Any]  # command to execute after acceptance, if any
async def context(
    options: MatchOptions, state: State, change: Optional[ChangeEvent], manual: bool
) -> Context:
    """Snapshot the current cursor surroundings into a ``Context``.

    Batches all nvim queries into a single atomic round-trip, then derives
    the line/word/symbol splits the completion pipeline needs.
    """
    # One atomic batch: screen, window, buffer, options and cursor together.
    with Atomic() as (atomic, ns):
        ns.scr_col = atomic.call_function("screencol", ())
        ns.win_height = atomic.win_get_height(0)
        ns.buf = atomic.get_current_buf()
        ns.name = atomic.buf_get_name(0)
        ns.line_count = atomic.buf_line_count(0)
        ns.filetype = atomic.buf_get_option(0, "filetype")
        ns.commentstring = atomic.buf_get_option(0, "commentstring")
        ns.fileformat = atomic.buf_get_option(0, "fileformat")
        ns.tabstop = atomic.buf_get_option(0, "tabstop")
        ns.expandtab = atomic.buf_get_option(0, "expandtab")
        ns.cursor = atomic.win_get_cursor(0)
        await atomic.commit(NoneType)
    scr_col = ns.scr_col(int)
    win_size = ns.win_height(int) // 2
    buf = ns.buf(Buffer)
    (r, col) = cast(Tuple[int, int], ns.cursor(NoneType))
    # nvim cursor rows are 1-based; normalize to 0-based.
    row = r - 1
    pos = (row, col)
    buf_line_count = ns.line_count(int)
    filename = normcase(ns.name(str))
    filetype = ns.filetype(str)
    comment_str = ns.commentstring(str)
    tabstop = ns.tabstop(int)
    expandtab = ns.expandtab(bool)
    linesep = linefeed(ns.fileformat(str))
    # Only fetch lines around the cursor (± half the window height).
    lo = max(0, row - win_size)
    hi = min(buf_line_count, row + win_size + 1)
    lines = await buf.get_lines(lo=lo, hi=hi)
    r = row - lo
    line = lines[r]
    lines_before, lines_after = lines[:r], lines[r + 1 :]
    # 'commentstring' uses %s as the content placeholder.
    lhs, _, rhs = comment_str.partition("%s")
    # Split the cursor line byte-wise so multi-byte text stays intact.
    b_line = encode(line)
    line_before, line_after = decode(b_line[:col]), decode(b_line[col:])
    utf16_col = len(encode(line_before, encoding=UTF16)) // 2
    utf32_col = len(encode(line_before, encoding=UTF32)) // 4
    split = gen_split(
        lhs=line_before, rhs=line_after, unifying_chars=options.unifying_chars
    )
    l_words_before, l_words_after = lower(split.word_lhs), lower(split.word_rhs)
    l_syms_before, l_syms_after = lower(split.syms_lhs), lower(split.syms_rhs)
    is_lower = l_words_before + l_words_after == split.word_lhs + split.word_rhs
    if DEBUG:
        # Cross-check our UTF-16/32 column math against nvim's own.
        u32, u16 = cast(
            Tuple[int, int],
            await Nvim.api.exec_lua(
                NoneType, "return {vim.str_utfindex(...)}", (line, col)
            ),
        )
        assert utf16_col == u16
        assert utf32_col == u32
    ctx = Context(
        manual=manual,
        change_id=state.change_id,
        commit_id=state.commit_id,
        cwd=state.cwd,
        buf_id=buf.number,
        filename=filename,
        filetype=filetype,
        line_count=buf_line_count,
        linefeed=linesep,
        tabstop=tabstop,
        expandtab=expandtab,
        comment=(lhs, rhs),
        position=pos,
        cursor=(row, col, utf16_col, utf32_col),
        scr_col=scr_col,
        win_size=win_size,
        line=split.lhs + split.rhs,
        line_before=line_before,
        line_after=line_after,
        lines=lines,
        lines_before=lines_before,
        lines_after=lines_after,
        words=split.word_lhs + split.word_rhs,
        words_before=split.word_lhs,
        words_after=split.word_rhs,
        syms=split.syms_lhs + split.syms_rhs,
        syms_before=split.syms_lhs,
        syms_after=split.syms_rhs,
        ws_before=split.ws_lhs,
        ws_after=split.ws_rhs,
        l_words_before=l_words_before,
        l_words_after=l_words_after,
        l_syms_before=l_syms_before,
        l_syms_after=l_syms_after,
        is_lower=is_lower,
        change=change,
    )
    return ctx
# Namespace key for this module's extmarks (see its use in `_comp_done`).
NS = uuid4()
async def edit(
    stack: Stack,
    state: State,
    metric: Metric,
    synthetic: bool,
) -> Optional[Tuple[NvimPos, Optional[TextTransforms]]]:
    """Apply a completion's edits to the buffer.

    Returns the final cursor position plus any snippet text transforms, or
    None when the edit cannot be applied safely (stale buffer, out-of-bounds
    rows).
    """
    win = await Window.get_current()
    buf = await win.get_buf()
    if buf.number != state.context.buf_id:
        # User switched buffers since the completion was computed.
        log.warn("%s", "stale buffer")
        return None
    else:
        await reset_undolevels()
        if synthetic:
            inserted, movement = "", None
        else:
            # Undo what nvim already inserted so edits start from a clean slate.
            inserted, movement = await _restore(
                win=win, buf=buf, pos=state.context.position
            )
        try:
            adjusted, primary, marks, text_trans = await _parse(
                buf=buf, stack=stack, state=state, comp=metric.comp
            )
        except (NvimError, ParseError) as e:
            # Fall back to the raw primary edit when snippet parsing fails.
            adjusted, primary, marks, text_trans = (
                False,
                metric.comp.primary_edit,
                (),
                {},
            )
            await Nvim.write(LANG("failed to parse snippet"))
            log.info("%s", e)
        adjust_indent = metric.comp.adjust_indent and not adjusted
        lo, hi = _rows_to_fetch(
            state.context,
            primary,
            *metric.comp.secondary_edits,
        )
        if lo < 0 or hi > state.context.line_count:
            log.warn("%s", pformat(("OUT OF BOUNDS", (lo, hi), metric)))
            return None
        else:
            limited_lines = await buf.get_lines(lo=lo, hi=hi)
            # Pad with empty rows so indices align with absolute row numbers.
            lines = [*chain(repeat("", times=lo), limited_lines)]
            view = _lines(lines)
            instructions = _consolidate(
                *_instructions(
                    state.context,
                    match=stack.settings.match,
                    comp=stack.settings.completion,
                    adjust_indent=adjust_indent,
                    lines=view,
                    primary=primary,
                    secondary=metric.comp.secondary_edits,
                )
            )
            n_row, n_col = _cursor(
                state.context.position,
                instructions=instructions,
            )
            if not synthetic:
                # Record the acceptance for ranking statistics.
                stack.idb.inserted(metric.instance.bytes, sort_by=metric.comp.sort_by)
            m_shift = await apply(buf=buf, instructions=instructions)
            if inserted:
                # Re-insert text the user typed while the popup was open.
                try:
                    await buf.set_text(
                        begin=(n_row, n_col),
                        end=(n_row, n_col),
                        text=(inserted,),
                    )
                except NvimError as e:
                    log.warn("%s", e)
            if movement is not None:
                try:
                    await win.set_cursor(row=n_row, col=n_col + movement)
                except NvimError as e:
                    log.warn("%s", e)
            if new_marks := tuple(_shift_marks(m_shift, marks=marks)):
                await mark(settings=stack.settings, buf=buf, marks=new_marks)
            if DEBUG:
                log.debug(
                    "%s",
                    pformat(
                        (
                            metric,
                            instructions,
                        )
                    ),
                )
            return (n_row, n_col), text_trans
class Stack:
    # Runtime bundle threaded through every RPC handler.
    settings: Settings
    lru: MutableMapping[UUID, Completion]  # recently resolved completions
    metrics: MutableMapping[UUID, Metric]  # per-completion scoring metadata
    idb: IDB  # insertions database
    supervisor: Supervisor
    workers: AbstractSet[Worker]  # active completion sources
def state(
    cwd: Optional[PurePath] = None,
    pum_width: Optional[int] = None,
    screen: Optional[Tuple[int, int]] = None,
    change_id: Optional[UUID] = None,
    commit_id: Optional[UUID] = None,
    preview_id: Optional[UUID] = None,
    nono_bufs: AbstractSet[int] = frozenset(),
    context: Optional[Context] = None,
    last_edit: Optional[Metric] = None,
    text_trans: Optional[TextTransforms] = None,
    inserted_pos: Optional[NvimPos] = None,
    pum_location: Union[VoidType, Optional[int]] = Void,
) -> State:
    """Read-modify-write the global editor state.

    Each truthy / non-None / non-Void argument overrides its field; all other
    fields are carried over from the previous state.  Stores and returns the
    new state.
    """
    old_state = _CELL.val
    new_state = State(
        cwd=cwd or old_state.cwd,
        pum_width=pum_width or old_state.pum_width,
        screen=screen or old_state.screen,
        change_id=change_id or old_state.change_id,
        commit_id=commit_id or old_state.commit_id,
        preview_id=preview_id or old_state.preview_id,
        # nono_bufs only ever grows — new entries are unioned in.
        nono_bufs=old_state.nono_bufs | nono_bufs,
        context=context or old_state.context,
        last_edit=last_edit or old_state.last_edit,
        # `is not None` so an empty mapping still replaces the old value.
        text_trans=text_trans if text_trans is not None else old_state.text_trans,
        inserted_pos=inserted_pos or old_state.inserted_pos,
        # Void is the "no change" sentinel; None is a legitimate value here.
        pum_location=(
            pum_location
            if not isinstance(pum_location, VoidType)
            else old_state.pum_location
        ),
    )
    _CELL.val = new_state
    return new_state
async def _comp_done(stack: Stack, event: Mapping[str, Any]) -> None:
    """CompleteDone handler: apply the accepted completion's edits.

    Recovers the completion via the UUID smuggled through ``user_data``,
    resolves/executes any LSP extras, applies the edits, then publishes the
    new state.
    """
    if data := event.get("user_data"):
        try:
            uid = _UDECODER(data)
        except DecodeError:
            # Not one of ours — some other plugin's completion item.
            pass
        else:
            s = state()
            if (metric := stack.metrics.get(uid)) and (ctx := s.context):
                row, col = s.context.position
                buf = await Buffer.get_current()
                if ctx.buf_id == buf.number:
                    ns = await Nvim.create_namespace(NS)
                    await buf.clear_namespace(ns)
                    before, *_ = await buf.get_lines(lo=row, hi=row + 1)
                    # Bracket the cursor with two extmarks so later edits can
                    # track how the line shifts.
                    e1 = ExtMark(
                        buf=buf,
                        marker=ExtMarker(1),
                        begin=(row, 0),
                        end=(row, col),
                        meta={},
                    )
                    e2 = ExtMark(
                        buf=buf,
                        marker=ExtMarker(2),
                        begin=(row, col),
                        end=(row, len(encode(before))),
                        meta={},
                    )
                    await buf.set_extmarks(ns, extmarks=(e1, e2))
                    new_metric = await _resolve(stack=stack, metric=metric)
                    if isinstance((extern := new_metric.comp.extern), ExternLSP):
                        # Run the item's LSP command, bounded by the resolve timeout.
                        _, pending = await wait(
                            (create_task(cmd(extern=extern)),),
                            timeout=stack.settings.clients.lsp.resolve_timeout,
                        )
                        await cancel(*pending)
                    if new_metric.comp.uid in stack.metrics:
                        if inserted := await edit(
                            stack=stack,
                            state=s,
                            metric=new_metric,
                            synthetic=False,
                        ):
                            inserted_at, text_trans = inserted
                        else:
                            inserted_at, text_trans = (-1, -1), None
                        state(
                            inserted_pos=inserted_at,
                            text_trans=text_trans,
                            last_edit=new_metric,
                            commit_id=uuid4(),
                        )
                    else:
                        # Metrics were refreshed while we were resolving.
                        log.warn("%s", "delayed completion")
154,824 | from itertools import chain
from locale import strxfrm
from os import linesep
from string import Template
from typing import Iterable, Iterator, Mapping, Sequence, Tuple
from uuid import uuid4
from pynvim_pp.buffer import Buffer
from pynvim_pp.float_win import list_floatwins, open_float_win
from pynvim_pp.lib import display_width
from std2.locale import si_prefixed_smol
from ...consts import MD_STATS
from ...databases.insertions.database import Statistics
from ...lang import LANG
from ...registry import rpc
from ..rt_types import Stack
_TPL = f"""
# {LANG("statistics")}
${{chart1}}
${{chart2}}
${{chart3}}
${{desc}}
""".lstrip()
_NS = uuid4()
def _pprn(stats: Iterable[Statistics]) -> Iterator[str]:
# Bundled markdown document appended below the charts in the stats window.
# NOTE(review): `_DOC_DIR` is not defined in this chunk — verify it is in scope.
MD_STATS = _DOC_DIR / "STATS.md"
class Stack:
async def stats(stack: Stack, *_: str) -> None:
    """Render usage statistics in a read-only markdown floating window."""
    stats = stack.idb.stats()
    chart1, chart2, chart3 = _pprn(stats)
    desc = MD_STATS.read_text()
    lines = (
        Template(_TPL)
        .substitute(chart1=chart1, chart2=chart2, chart3=chart3, desc=desc)
        .splitlines()
    )
    # Replace any previously opened stats window.
    async for win in list_floatwins(_NS):
        await win.close()
    buf = await Buffer.create(
        listed=False, scratch=True, wipe=True, nofile=True, noswap=True
    )
    await buf.set_lines(lines)
    await buf.opts.set("modifiable", val=False)
    await buf.opts.set("syntax", val="markdown")
    await open_float_win(_NS, margin=0, relsize=0.95, buf=buf, border="rounded")
154,825 | from asyncio import Task, create_task, gather, sleep
from contextlib import suppress
from typing import Optional
from uuid import uuid4
from pynvim_pp.buffer import Buffer
from pynvim_pp.float_win import list_floatwins
from pynvim_pp.nvim import Nvim
from pynvim_pp.rpc_types import NvimError
from pynvim_pp.types import NoneType
from std2.asyncio import cancel
from std2.cell import RefCell
from std2.locale import si_prefixed_smol
from ...clients.buffers.worker import Worker as BufWorker
from ...clients.registers.worker import Worker as RegWorker
from ...clients.tags.worker import Worker as TagsWorker
from ...clients.tmux.worker import Worker as TmuxWorker
from ...clients.tree_sitter.worker import Worker as TSWorker
from ...lang import LANG
from ...registry import NAMESPACE, atomic, autocmd, rpc
from ..rt_types import Stack
from ..state import state
# Namespace identifying this module's floating windows.
_NS = uuid4()
class Stack:
async def _kill_float_wins(stack: Stack) -> None:
    """Close all tracked float windows unless exactly two are open."""
    open_wins = [win async for win in list_floatwins(_NS)]
    # Anything other than the expected pair of floats is torn down.
    if len(open_wins) != 2:
        for win in open_wins:
            await win.close()
154,826 | from asyncio import Task, create_task, gather, sleep
from contextlib import suppress
from typing import Optional
from uuid import uuid4
from pynvim_pp.buffer import Buffer
from pynvim_pp.float_win import list_floatwins
from pynvim_pp.nvim import Nvim
from pynvim_pp.rpc_types import NvimError
from pynvim_pp.types import NoneType
from std2.asyncio import cancel
from std2.cell import RefCell
from std2.locale import si_prefixed_smol
from ...clients.buffers.worker import Worker as BufWorker
from ...clients.registers.worker import Worker as RegWorker
from ...clients.tags.worker import Worker as TagsWorker
from ...clients.tmux.worker import Worker as TmuxWorker
from ...clients.tree_sitter.worker import Worker as TSWorker
from ...lang import LANG
from ...registry import NAMESPACE, atomic, autocmd, rpc
from ..rt_types import Stack
from ..state import state
class Stack:
    # Runtime bundle threaded through every RPC handler.
    settings: Settings
    lru: MutableMapping[UUID, Completion]  # recently resolved completions
    metrics: MutableMapping[UUID, Metric]  # per-completion scoring metadata
    idb: IDB  # insertions database
    supervisor: Supervisor
    workers: AbstractSet[Worker]  # active completion sources
def state(
    cwd: Optional[PurePath] = None,
    pum_width: Optional[int] = None,
    screen: Optional[Tuple[int, int]] = None,
    change_id: Optional[UUID] = None,
    commit_id: Optional[UUID] = None,
    preview_id: Optional[UUID] = None,
    nono_bufs: AbstractSet[int] = frozenset(),
    context: Optional[Context] = None,
    last_edit: Optional[Metric] = None,
    text_trans: Optional[TextTransforms] = None,
    inserted_pos: Optional[NvimPos] = None,
    pum_location: Union[VoidType, Optional[int]] = Void,
) -> State:
    """Read-modify-write the global editor state.

    Each truthy / non-None / non-Void argument overrides its field; all other
    fields are carried over from the previous state.  Stores and returns the
    new state.
    """
    old_state = _CELL.val
    new_state = State(
        cwd=cwd or old_state.cwd,
        pum_width=pum_width or old_state.pum_width,
        screen=screen or old_state.screen,
        change_id=change_id or old_state.change_id,
        commit_id=commit_id or old_state.commit_id,
        preview_id=preview_id or old_state.preview_id,
        # nono_bufs only ever grows — new entries are unioned in.
        nono_bufs=old_state.nono_bufs | nono_bufs,
        context=context or old_state.context,
        last_edit=last_edit or old_state.last_edit,
        # `is not None` so an empty mapping still replaces the old value.
        text_trans=text_trans if text_trans is not None else old_state.text_trans,
        inserted_pos=inserted_pos or old_state.inserted_pos,
        # Void is the "no change" sentinel; None is a legitimate value here.
        pum_location=(
            pum_location
            if not isinstance(pum_location, VoidType)
            else old_state.pum_location
        ),
    )
    _CELL.val = new_state
    return new_state
async def _new_cwd(stack: Stack) -> None:
    """Record the new working directory and hand it to the tags worker."""
    s = state(cwd=await Nvim.getcwd())
    tags_worker = next(
        (w for w in stack.workers if isinstance(w, TagsWorker)), None
    )
    if tags_worker:
        # Fire and forget — the swap runs in the background.
        create_task(tags_worker.swap(s.cwd))
154,827 | from asyncio import Task, create_task, gather, sleep
from contextlib import suppress
from typing import Optional
from uuid import uuid4
from pynvim_pp.buffer import Buffer
from pynvim_pp.float_win import list_floatwins
from pynvim_pp.nvim import Nvim
from pynvim_pp.rpc_types import NvimError
from pynvim_pp.types import NoneType
from std2.asyncio import cancel
from std2.cell import RefCell
from std2.locale import si_prefixed_smol
from ...clients.buffers.worker import Worker as BufWorker
from ...clients.registers.worker import Worker as RegWorker
from ...clients.tags.worker import Worker as TagsWorker
from ...clients.tmux.worker import Worker as TmuxWorker
from ...clients.tree_sitter.worker import Worker as TSWorker
from ...lang import LANG
from ...registry import NAMESPACE, atomic, autocmd, rpc
from ..rt_types import Stack
from ..state import state
class Stack:
    # Runtime bundle threaded through every RPC handler.
    settings: Settings
    lru: MutableMapping[UUID, Completion]  # recently resolved completions
    metrics: MutableMapping[UUID, Metric]  # per-completion scoring metadata
    idb: IDB  # insertions database
    supervisor: Supervisor
    workers: AbstractSet[Worker]  # active completion sources
async def _ft_changed(stack: Stack) -> None:
    """Push the current buffer's filetype/filename to the buffers worker."""
    buf_worker = next(
        (w for w in stack.workers if isinstance(w, BufWorker)), None
    )
    if buf_worker:
        buf = await Buffer.get_current()
        ft = await buf.filetype()
        name = await buf.get_name() or ""
        # Fire and forget — the index update runs in the background.
        create_task(buf_worker.buf_update(buf.number, filetype=ft, filename=name))
154,828 | from asyncio import Task, create_task, gather, sleep
from contextlib import suppress
from typing import Optional
from uuid import uuid4
from pynvim_pp.buffer import Buffer
from pynvim_pp.float_win import list_floatwins
from pynvim_pp.nvim import Nvim
from pynvim_pp.rpc_types import NvimError
from pynvim_pp.types import NoneType
from std2.asyncio import cancel
from std2.cell import RefCell
from std2.locale import si_prefixed_smol
from ...clients.buffers.worker import Worker as BufWorker
from ...clients.registers.worker import Worker as RegWorker
from ...clients.tags.worker import Worker as TagsWorker
from ...clients.tmux.worker import Worker as TmuxWorker
from ...clients.tree_sitter.worker import Worker as TSWorker
from ...lang import LANG
from ...registry import NAMESPACE, atomic, autocmd, rpc
from ..rt_types import Stack
from ..state import state
class Stack:
    # Runtime bundle threaded through every RPC handler.
    settings: Settings
    lru: MutableMapping[UUID, Completion]  # recently resolved completions
    metrics: MutableMapping[UUID, Metric]  # per-completion scoring metadata
    idb: IDB  # insertions database
    supervisor: Supervisor
    workers: AbstractSet[Worker]  # active completion sources
async def _on_focus(stack: Stack) -> None:
    """On focus gain, refresh the tmux worker's snapshot in the background."""
    tmux_worker = next(
        (w for w in stack.workers if isinstance(w, TmuxWorker)), None
    )
    if tmux_worker:
        create_task(tmux_worker.periodical())
154,829 | from asyncio import Task, create_task, gather, sleep
from contextlib import suppress
from typing import Optional
from uuid import uuid4
from pynvim_pp.buffer import Buffer
from pynvim_pp.float_win import list_floatwins
from pynvim_pp.nvim import Nvim
from pynvim_pp.rpc_types import NvimError
from pynvim_pp.types import NoneType
from std2.asyncio import cancel
from std2.cell import RefCell
from std2.locale import si_prefixed_smol
from ...clients.buffers.worker import Worker as BufWorker
from ...clients.registers.worker import Worker as RegWorker
from ...clients.tags.worker import Worker as TagsWorker
from ...clients.tmux.worker import Worker as TmuxWorker
from ...clients.tree_sitter.worker import Worker as TSWorker
from ...lang import LANG
from ...registry import NAMESPACE, atomic, autocmd, rpc
from ..rt_types import Stack
from ..state import state
# Holds the currently scheduled idle task so a newer event can cancel it.
_CELL = RefCell[Optional[Task]](None)
async def _insert_enter(stack: Stack) -> None:
    """Ask the tree-sitter worker to (re)parse the current buffer.

    Buffers previously flagged as too slow (``nono_bufs``) are skipped; a
    buffer that proves too slow during this run gets flagged and reported.
    """
    for worker in stack.workers:
        if isinstance(worker, TSWorker):
            buf = await Buffer.get_current()
            nono_bufs = state().nono_bufs
            if buf.number not in nono_bufs:

                async def cont() -> None:
                    if populated := await worker.populate():
                        keep_going, elapsed = populated
                        if not keep_going:
                            # Too slow — blacklist this buffer and tell the user.
                            state(nono_bufs={buf.number})
                            msg = LANG(
                                "source slow",
                                source=stack.settings.clients.tree_sitter.short_name,
                                elapsed=si_prefixed_smol(elapsed, precision=0),
                            )
                            await Nvim.write(msg, error=True)

                create_task(cont())
            break
class Stack:
    # Runtime bundle threaded through every RPC handler.
    settings: Settings
    lru: MutableMapping[UUID, Completion]  # recently resolved completions
    metrics: MutableMapping[UUID, Metric]  # per-completion scoring metadata
    idb: IDB  # insertions database
    supervisor: Supervisor
    workers: AbstractSet[Worker]  # active completion sources
def state(
    cwd: Optional[PurePath] = None,
    pum_width: Optional[int] = None,
    screen: Optional[Tuple[int, int]] = None,
    change_id: Optional[UUID] = None,
    commit_id: Optional[UUID] = None,
    preview_id: Optional[UUID] = None,
    nono_bufs: AbstractSet[int] = frozenset(),
    context: Optional[Context] = None,
    last_edit: Optional[Metric] = None,
    text_trans: Optional[TextTransforms] = None,
    inserted_pos: Optional[NvimPos] = None,
    pum_location: Union[VoidType, Optional[int]] = Void,
) -> State:
    """Read-modify-write the global state cell; returns the new State.

    Arguments left at their defaults carry the previous value forward.
    ``nono_bufs`` accumulates via set-union instead of replacing, and
    ``pum_location`` uses the ``Void`` sentinel so an explicit ``None``
    can still be stored.
    """
    old_state = _CELL.val  # NOTE(review): assumes the cell was seeded before first call
    new_state = State(
        cwd=cwd or old_state.cwd,
        pum_width=pum_width or old_state.pum_width,
        screen=screen or old_state.screen,
        change_id=change_id or old_state.change_id,
        commit_id=commit_id or old_state.commit_id,
        preview_id=preview_id or old_state.preview_id,
        nono_bufs=old_state.nono_bufs | nono_bufs,  # union: blacklist only grows
        context=context or old_state.context,
        last_edit=last_edit or old_state.last_edit,
        text_trans=text_trans if text_trans is not None else old_state.text_trans,
        inserted_pos=inserted_pos or old_state.inserted_pos,
        pum_location=(
            pum_location
            if not isinstance(pum_location, VoidType)
            else old_state.pum_location
        ),
    )
    _CELL.val = new_state
    return new_state
async def _when_idle(stack: Stack) -> None:
    # CursorHold-style idle hook.  Debounce: cancel any pending idle task
    # before scheduling a fresh one.
    if task := _CELL.val:
        _CELL.val = None
        await cancel(task)
    async def cont() -> None:
        await sleep(stack.settings.limits.idle_timeout)
        with suppress(NvimError):
            buf = await Buffer.get_current()
            buf_type = await buf.opts.get(str, "buftype")
            if buf_type == "terminal":
                # Terminal buffers never need completion: detach and blacklist.
                await Nvim.api.buf_detach(NoneType, buf)
                state(nono_bufs={buf.number})
        # Pre-warm tree-sitter and let the supervisor do idle housekeeping.
        await gather(_insert_enter(stack=stack), stack.supervisor.notify_idle())
    _CELL.val = create_task(cont())
154,830 | from asyncio import Task, create_task, gather, sleep
from contextlib import suppress
from typing import Optional
from uuid import uuid4
from pynvim_pp.buffer import Buffer
from pynvim_pp.float_win import list_floatwins
from pynvim_pp.nvim import Nvim
from pynvim_pp.rpc_types import NvimError
from pynvim_pp.types import NoneType
from std2.asyncio import cancel
from std2.cell import RefCell
from std2.locale import si_prefixed_smol
from ...clients.buffers.worker import Worker as BufWorker
from ...clients.registers.worker import Worker as RegWorker
from ...clients.tags.worker import Worker as TagsWorker
from ...clients.tmux.worker import Worker as TmuxWorker
from ...clients.tree_sitter.worker import Worker as TSWorker
from ...lang import LANG
from ...registry import NAMESPACE, atomic, autocmd, rpc
from ..rt_types import Stack
from ..state import state
class Stack:
    """Aggregate of the plugin's long-lived runtime objects.

    NOTE(review): no @dataclass decorator is visible in this chunk --
    presumably applied at the real declaration site; confirm.
    """
    settings: Settings  # merged user/default configuration
    lru: MutableMapping[UUID, Completion]  # cache of resolved completions
    metrics: MutableMapping[UUID, Metric]  # per-completion scoring data
    idb: IDB  # insertions database
    supervisor: Supervisor  # orchestrates the completion workers
    workers: AbstractSet[Worker]  # active completion sources
async def _on_yank(stack: Stack, regsize: int, operator: str, regname: str) -> None:
    """Forward yank ("y") events to every registers worker."""
    if operator != "y":
        return
    for reg_worker in (w for w in stack.workers if isinstance(w, RegWorker)):
        await reg_worker.post_yank(regname, regsize=regsize)
154,831 | from pynvim_pp.keymap import Keymap
from pynvim_pp.nvim import Nvim
from pynvim_pp.settings import Settings
from pynvim_pp.types import NoneType
from ...registry import NAMESPACE, atomic, autocmd, rpc
from ...shared.settings import KeyMapping
from ..rt_types import Stack
from ..state import state
from .marks import nav_mark
from .omnifunc import omnifunc
from .preview import preview_preview
from .repeat import repeat
from .user_snippets import eval_snips
class Stack:
    """Aggregate of the plugin's long-lived runtime objects.

    NOTE(review): no @dataclass decorator is visible in this chunk --
    presumably applied at the real declaration site; confirm.
    """
    settings: Settings  # merged user/default configuration
    lru: MutableMapping[UUID, Completion]  # cache of resolved completions
    metrics: MutableMapping[UUID, Metric]  # per-completion scoring data
    idb: IDB  # insertions database
    supervisor: Supervisor  # orchestrates the completion workers
    workers: AbstractSet[Worker]  # active completion sources
def state(
    cwd: Optional[PurePath] = None,
    pum_width: Optional[int] = None,
    screen: Optional[Tuple[int, int]] = None,
    change_id: Optional[UUID] = None,
    commit_id: Optional[UUID] = None,
    preview_id: Optional[UUID] = None,
    nono_bufs: AbstractSet[int] = frozenset(),
    context: Optional[Context] = None,
    last_edit: Optional[Metric] = None,
    text_trans: Optional[TextTransforms] = None,
    inserted_pos: Optional[NvimPos] = None,
    pum_location: Union[VoidType, Optional[int]] = Void,
) -> State:
    """Read-modify-write the global state cell; returns the new State.

    Arguments left at their defaults carry the previous value forward.
    ``nono_bufs`` accumulates via set-union instead of replacing, and
    ``pum_location`` uses the ``Void`` sentinel so an explicit ``None``
    can still be stored.
    """
    old_state = _CELL.val  # NOTE(review): assumes the cell was seeded before first call
    new_state = State(
        cwd=cwd or old_state.cwd,
        pum_width=pum_width or old_state.pum_width,
        screen=screen or old_state.screen,
        change_id=change_id or old_state.change_id,
        commit_id=commit_id or old_state.commit_id,
        preview_id=preview_id or old_state.preview_id,
        nono_bufs=old_state.nono_bufs | nono_bufs,  # union: blacklist only grows
        context=context or old_state.context,
        last_edit=last_edit or old_state.last_edit,
        text_trans=text_trans if text_trans is not None else old_state.text_trans,
        inserted_pos=inserted_pos or old_state.inserted_pos,
        pum_location=(
            pum_location
            if not isinstance(pum_location, VoidType)
            else old_state.pum_location
        ),
    )
    _CELL.val = new_state
    return new_state
async def _update_pumheight(stack: Stack) -> None:
    """Recompute the 'pumheight' option from screen size and display settings."""
    height, width = await Nvim.size()
    state(screen=(width, height))
    pum = stack.settings.display.pum
    capped = min(round(height * pum.y_ratio), pum.y_max_len)
    await Nvim.opts.set("pumheight", val=capped)
154,832 | from argparse import Namespace
from dataclasses import dataclass
from itertools import chain
from os import linesep
from random import choice, sample
from sys import stdout
from typing import Sequence, Tuple
from pynvim_pp.lib import decode, encode
from pynvim_pp.nvim import Nvim
from std2.argparse import ArgparseError, ArgParser
from std2.pickle.decoder import new_decoder
from yaml import safe_load
from ...consts import HELO_ARTIFACTS
from ...registry import rpc
from ..rt_types import Stack
_HELO = new_decoder[_Helo](_Helo)(safe_load(decode(HELO_ARTIFACTS.read_bytes())))
def _parse_args(args: Sequence[str]) -> Namespace:
    """Parse the greeting command's arguments (-s / --shut-up)."""
    argp = ArgParser()
    argp.add_argument("-s", "--shut-up", action="store_true")
    parsed = argp.parse_args(args)
    return parsed
class Stack:
    """Aggregate of the plugin's long-lived runtime objects.

    NOTE(review): no @dataclass decorator is visible in this chunk --
    presumably applied at the real declaration site; confirm.
    """
    settings: Settings  # merged user/default configuration
    lru: MutableMapping[UUID, Completion]  # cache of resolved completions
    metrics: MutableMapping[UUID, Metric]  # per-completion scoring data
    idb: IDB  # insertions database
    supervisor: Supervisor  # orchestrates the completion workers
    workers: AbstractSet[Worker]  # active completion sources
async def now(stack: Stack, args: Sequence[str]) -> None:
    # Print the start-up greeting (birds + salutation) unless disabled by
    # config or by the -s/--shut-up flag.
    try:
        ns = _parse_args(args)
    except ArgparseError as e:
        await Nvim.write(e, error=True)
    else:
        if stack.settings.display.statusline.helo:
            if not ns.shut_up:
                lo, hi = _HELO.chars
                chars = choice(range(lo, hi))  # how many birds to show
                star = (choice(_HELO.stars),)
                birds = " ".join(chain(star, sample(_HELO.cocks, k=chars), star))
                helo = choice(_HELO.helo)
                msg = f"{birds} {helo}{linesep}"
                encoded = encode(msg)
                # Write raw bytes so the terminal receives the exact encoding.
                stdout.buffer.write(encoded)
                stdout.buffer.flush()
154,833 | from enum import Enum, auto
from pathlib import Path
from typing import Sequence, Tuple
from uuid import uuid4
from webbrowser import open as open_w
from pynvim_pp.buffer import Buffer
from pynvim_pp.float_win import list_floatwins, open_float_win
from pynvim_pp.lib import decode
from pynvim_pp.nvim import Nvim
from std2.argparse import ArgparseError, ArgParser
from std2.types import never
from ...consts import (
MD_C_SOURCES,
MD_COMPLETION,
MD_CONF,
MD_DISPLAY,
MD_FUZZY,
MD_KEYBIND,
MD_MISC,
MD_PREF,
MD_README,
MD_SNIPS,
MD_SOURCES,
MD_STATS,
URI_C_SOURCES,
URI_COMPLETION,
URI_CONF,
URI_DISPLAY,
URI_FUZZY,
URI_KEYBIND,
URI_MISC,
URI_PREF,
URI_README,
URI_SNIPS,
URI_SOURCES,
URI_STATISTICS,
)
from ...registry import rpc
from ..rt_types import Stack
_NS = uuid4()
def _directory(topic: _Topics) -> Tuple[Path, str]:
    """Map a help topic to its (markdown file, web URI) pair."""
    table = {
        _Topics.index: (MD_README, URI_README),
        _Topics.config: (MD_CONF, URI_CONF),
        _Topics.keybind: (MD_KEYBIND, URI_KEYBIND),
        _Topics.snips: (MD_SNIPS, URI_SNIPS),
        _Topics.fuzzy: (MD_FUZZY, URI_FUZZY),
        _Topics.comp: (MD_COMPLETION, URI_COMPLETION),
        _Topics.display: (MD_DISPLAY, URI_DISPLAY),
        _Topics.sources: (MD_SOURCES, URI_SOURCES),
        _Topics.misc: (MD_MISC, URI_MISC),
        _Topics.stats: (MD_STATS, URI_STATISTICS),
        _Topics.perf: (MD_PREF, URI_PREF),
        _Topics.custom_sources: (MD_C_SOURCES, URI_C_SOURCES),
    }
    if (entry := table.get(topic)) is not None:
        return entry
    else:
        # Exhaustiveness check -- unreachable for a valid _Topics member.
        never(topic)
def _parse_args(args: Sequence[str]) -> Tuple[_Topics, bool]:
    """Parse help-command args into (topic, open-in-browser?)."""
    argp = ArgParser()
    topic_names = tuple(member.name for member in _Topics)
    argp.add_argument(
        "topic",
        nargs="?",
        choices=topic_names,
        default=_Topics.index.name,
    )
    argp.add_argument("-w", "--web", action="store_true", default=False)
    parsed = argp.parse_args(args)
    return _Topics[parsed.topic], parsed.web
class Stack:
    """Aggregate of the plugin's long-lived runtime objects.

    NOTE(review): no @dataclass decorator is visible in this chunk --
    presumably applied at the real declaration site; confirm.
    """
    settings: Settings  # merged user/default configuration
    lru: MutableMapping[UUID, Completion]  # cache of resolved completions
    metrics: MutableMapping[UUID, Metric]  # per-completion scoring data
    idb: IDB  # insertions database
    supervisor: Supervisor  # orchestrates the completion workers
    workers: AbstractSet[Worker]  # active completion sources
async def _help(stack: Stack, args: Sequence[str]) -> None:
    # Show a docs topic, either in the browser (--web) or in a float window.
    try:
        topic, use_web = _parse_args(args)
    except ArgparseError as e:
        await Nvim.write(e, error=True)
    else:
        md, uri = _directory(topic)
        web_d = open_w(uri) if use_web else False
        if not web_d:
            # Browser launch failed or was not requested: render in-editor.
            async for win in list_floatwins(_NS):
                await win.close()  # only one help float at a time
            lines = decode(md.read_bytes()).splitlines()
            buf = await Buffer.create(
                listed=False, scratch=True, wipe=True, nofile=True, noswap=True
            )
            await buf.set_lines(lines=lines)
            await buf.opts.set("modifiable", val=False)
            await buf.opts.set("syntax", val="markdown")
            await open_float_win(_NS, margin=0, relsize=0.95, buf=buf, border="rounded")
154,834 | from asyncio import Task, gather
from asyncio.tasks import create_task
from contextlib import suppress
from time import monotonic
from typing import Mapping, Optional, Sequence, Tuple, cast
from uuid import uuid4
from pynvim_pp.atomic import Atomic
from pynvim_pp.buffer import Buffer
from pynvim_pp.logging import suppress_and_log
from pynvim_pp.nvim import Nvim
from pynvim_pp.rpc_types import NvimError
from pynvim_pp.types import NoneType
from pynvim_pp.window import Window
from std2.asyncio import cancel
from std2.cell import RefCell
from ...clients.buffers.worker import Worker as BufWorker
from ...registry import NAMESPACE, atomic, autocmd, rpc
from ...shared.timeit import timeit
from ...shared.types import ChangeEvent
from ..rt_types import Stack
from ..state import state
from .omnifunc import comp_func
_ = autocmd("BufEnter", "InsertEnter") << f"lua {NAMESPACE}.{_buf_enter.method}()"
class Stack:
    """Aggregate of the plugin's long-lived runtime objects.

    NOTE(review): no @dataclass decorator is visible in this chunk --
    presumably applied at the real declaration site; confirm.
    """
    settings: Settings  # merged user/default configuration
    lru: MutableMapping[UUID, Completion]  # cache of resolved completions
    metrics: MutableMapping[UUID, Metric]  # per-completion scoring data
    idb: IDB  # insertions database
    supervisor: Supervisor  # orchestrates the completion workers
    workers: AbstractSet[Worker]  # active completion sources
def state(
    cwd: Optional[PurePath] = None,
    pum_width: Optional[int] = None,
    screen: Optional[Tuple[int, int]] = None,
    change_id: Optional[UUID] = None,
    commit_id: Optional[UUID] = None,
    preview_id: Optional[UUID] = None,
    nono_bufs: AbstractSet[int] = frozenset(),
    context: Optional[Context] = None,
    last_edit: Optional[Metric] = None,
    text_trans: Optional[TextTransforms] = None,
    inserted_pos: Optional[NvimPos] = None,
    pum_location: Union[VoidType, Optional[int]] = Void,
) -> State:
    """Read-modify-write the global state cell; returns the new State.

    Arguments left at their defaults carry the previous value forward.
    ``nono_bufs`` accumulates via set-union instead of replacing, and
    ``pum_location`` uses the ``Void`` sentinel so an explicit ``None``
    can still be stored.
    """
    old_state = _CELL.val  # NOTE(review): assumes the cell was seeded before first call
    new_state = State(
        cwd=cwd or old_state.cwd,
        pum_width=pum_width or old_state.pum_width,
        screen=screen or old_state.screen,
        change_id=change_id or old_state.change_id,
        commit_id=commit_id or old_state.commit_id,
        preview_id=preview_id or old_state.preview_id,
        nono_bufs=old_state.nono_bufs | nono_bufs,  # union: blacklist only grows
        context=context or old_state.context,
        last_edit=last_edit or old_state.last_edit,
        text_trans=text_trans if text_trans is not None else old_state.text_trans,
        inserted_pos=inserted_pos or old_state.inserted_pos,
        pum_location=(
            pum_location
            if not isinstance(pum_location, VoidType)
            else old_state.pum_location
        ),
    )
    _CELL.val = new_state
    return new_state
async def _buf_enter(stack: Stack) -> None:
    # On BufEnter/InsertEnter: start a new "commit" and prime the buffer
    # worker with the lines around the cursor of listed, non-terminal bufs.
    state(commit_id=uuid4())
    win = await Window.get_current()
    buf = await win.get_buf()
    listed = await buf.opts.get(bool, "buflisted")
    buf_type = await buf.opts.get(str, "buftype")
    if listed and buf_type != "terminal":
        # buf_attach returns truthy when the subscription succeeded.
        if await Nvim.api.buf_attach(bool, buf, False, {}):
            for worker in stack.workers:
                if isinstance(worker, BufWorker):
                    filetype = await buf.filetype()
                    filename = await buf.get_name() or ""
                    row, _ = await win.get_cursor()
                    height = await win.get_height()
                    line_count = await buf.line_count()
                    # Only feed one window-height of lines around the cursor.
                    lo = max(0, row - height)
                    hi = min(line_count, row + height + 1)
                    lines = await buf.get_lines(lo=lo, hi=hi)
                    await worker.set_lines(
                        buf.number,
                        filetype=filetype,
                        filename=filename,
                        lo=lo,
                        hi=hi,
                        lines=lines,
                    )
                    break
154,835 | from asyncio import Task, gather
from asyncio.tasks import create_task
from contextlib import suppress
from time import monotonic
from typing import Mapping, Optional, Sequence, Tuple, cast
from uuid import uuid4
from pynvim_pp.atomic import Atomic
from pynvim_pp.buffer import Buffer
from pynvim_pp.logging import suppress_and_log
from pynvim_pp.nvim import Nvim
from pynvim_pp.rpc_types import NvimError
from pynvim_pp.types import NoneType
from pynvim_pp.window import Window
from std2.asyncio import cancel
from std2.cell import RefCell
from ...clients.buffers.worker import Worker as BufWorker
from ...registry import NAMESPACE, atomic, autocmd, rpc
from ...shared.timeit import timeit
from ...shared.types import ChangeEvent
from ..rt_types import Stack
from ..state import state
from .omnifunc import comp_func
_CELL = RefCell[Optional[Task]](None)
async def _status() -> Tuple[str, str]:
    """Fetch (editor mode, completion-menu mode) in one atomic round trip."""
    with Atomic() as (atomic, ns):
        # Queue both calls; nothing is sent until commit().
        ns.mode = atomic.get_mode()
        ns.complete_info = atomic.call_function("complete_info", (("mode",),))
        await atomic.commit(NoneType)
    # ns.<name>(NoneType) unwraps the queued call's result after commit.
    mode = cast(Mapping[str, str], ns.mode(NoneType))["mode"]
    comp_mode = cast(Mapping[str, str], ns.complete_info(NoneType))["mode"]
    return mode, comp_mode
def timeit(
    name: str, *args: Any, force: bool = False, warn: Optional[float] = None
) -> Iterator[None]:
    # Generator-style context manager that times the wrapped section and logs
    # it, keeping a running per-label average in _RECORDS.
    # NOTE(review): no @contextmanager decorator visible in this chunk --
    # presumably applied at the real definition site; confirm.
    if DEBUG or force or warn is not None:
        with _timeit() as t:
            yield None
        delta = t().total_seconds()
        # Log when debugging, when forced, or when slower than `warn` seconds.
        if DEBUG or force or delta >= (warn or 0):
            times, cum = _RECORDS.get(name, (0, 0))  # (count, cumulative secs)
            tt, c = times + 1, cum + delta
            _RECORDS[name] = tt, c
            label = name.ljust(50)
            time = f"{si_prefixed_smol(delta, precision=0)}s".ljust(8)
            ttime = f"{si_prefixed_smol(c / tt, precision=0)}s".ljust(8)  # running mean
            msg = f"TIME -- {label} :: {time} @ {ttime} {' '.join(map(str, args))}"
            if force:
                log.info("%s", msg)
            else:
                log.debug("%s", msg)
    else:
        # Timing disabled: still behave as a context manager.
        yield None
class ChangeEvent:
    """A buffer line-range change as delivered by the nvim_buf_attach callback."""
    range: range  # [lo, hi) line numbers that were touched
    lines: Sequence[str]  # the replacement lines for that range
class Stack:
    """Aggregate of the plugin's long-lived runtime objects.

    NOTE(review): no @dataclass decorator is visible in this chunk --
    presumably applied at the real declaration site; confirm.
    """
    settings: Settings  # merged user/default configuration
    lru: MutableMapping[UUID, Completion]  # cache of resolved completions
    metrics: MutableMapping[UUID, Metric]  # per-completion scoring data
    idb: IDB  # insertions database
    supervisor: Supervisor  # orchestrates the completion workers
    workers: AbstractSet[Worker]  # active completion sources
def state(
    cwd: Optional[PurePath] = None,
    pum_width: Optional[int] = None,
    screen: Optional[Tuple[int, int]] = None,
    change_id: Optional[UUID] = None,
    commit_id: Optional[UUID] = None,
    preview_id: Optional[UUID] = None,
    nono_bufs: AbstractSet[int] = frozenset(),
    context: Optional[Context] = None,
    last_edit: Optional[Metric] = None,
    text_trans: Optional[TextTransforms] = None,
    inserted_pos: Optional[NvimPos] = None,
    pum_location: Union[VoidType, Optional[int]] = Void,
) -> State:
    """Read-modify-write the global state cell; returns the new State.

    Arguments left at their defaults carry the previous value forward.
    ``nono_bufs`` accumulates via set-union instead of replacing, and
    ``pum_location`` uses the ``Void`` sentinel so an explicit ``None``
    can still be stored.
    """
    old_state = _CELL.val  # NOTE(review): assumes the cell was seeded before first call
    new_state = State(
        cwd=cwd or old_state.cwd,
        pum_width=pum_width or old_state.pum_width,
        screen=screen or old_state.screen,
        change_id=change_id or old_state.change_id,
        commit_id=commit_id or old_state.commit_id,
        preview_id=preview_id or old_state.preview_id,
        nono_bufs=old_state.nono_bufs | nono_bufs,  # union: blacklist only grows
        context=context or old_state.context,
        last_edit=last_edit or old_state.last_edit,
        text_trans=text_trans if text_trans is not None else old_state.text_trans,
        inserted_pos=inserted_pos or old_state.inserted_pos,
        pum_location=(
            pum_location
            if not isinstance(pum_location, VoidType)
            else old_state.pum_location
        ),
    )
    _CELL.val = new_state
    return new_state
async def comp_func(
    stack: Stack, s: State, change: Optional[ChangeEvent], t0: float, manual: bool
) -> None:
    # Core completion pass: build an editor context, decide whether to
    # complete, collect results from all sources, and feed the pum.
    with suppress_and_log():
        ctx = await context(
            options=stack.settings.match, state=s, change=change, manual=manual
        )
        should = (
            _should_cont(
                s,
                prev=s.context,
                cur=ctx,
                skip_after=stack.settings.completion.skip_after,
            )
            if ctx
            else False
        )
        # NOTE(review): raises AttributeError if ctx is falsy -- presumably
        # context() always returns a usable object here; confirm.
        _, col = ctx.position
        if should:
            state(context=ctx)
            # Optionally flash an empty menu so a stale pum closes quickly
            # while sources are still being collected.
            metrics, _ = await gather(
                stack.supervisor.collect(ctx),
                (
                    complete(stack=stack, col=col, comps=())
                    if stack.settings.display.pum.fast_close
                    else sleep(0)
                ),
            )
            s = state()
            # Only publish if the buffer has not changed since collection began.
            if s.change_id == ctx.change_id:
                vim_comps = tuple(
                    trans(
                        stack,
                        pum_width=s.pum_width,
                        context=ctx,
                        metrics=metrics,
                    )
                )
                await complete(stack=stack, col=col, comps=vim_comps)
                if DEBUG:
                    t1 = monotonic()
                    delta = t1 - t0
                    msg = f"TOTAL >>> {si_prefixed_smol(delta, precision=0)}s".ljust(8)
                    log.info("%s", msg)
        else:
            # Not completing: clear the pum and reset the insertion marker.
            await complete(stack=stack, col=col, comps=())
            state(inserted_pos=(-1, -1))
async def _lines_event(
    stack: Stack,
    _: Buffer,
    change_tick: Optional[int],
    lo: int,
    hi: int,
    lines: Sequence[str],
    pending: bool,
) -> None:
    # nvim_buf_attach "lines" callback: debounce and kick off a completion
    # pass for the changed region.
    t0 = monotonic()
    if task := _CELL.val:
        _CELL.val = None
        await cancel(task)  # supersede any in-flight pass
    # change_tick is None when the buffer was detached/reloaded.
    if change_tick is not None:
        async def cont() -> None:
            with suppress_and_log():
                with timeit("POLL"), suppress(NvimError):
                    (mode, comp_mode), _ = await gather(
                        _status(), stack.supervisor.interrupt()
                    )
                    s = state(change_id=uuid4())
                    if (
                        stack.settings.completion.always
                        and not pending  # more change events are queued -> wait
                        and mode.startswith("i")  # insert-mode only
                        and comp_mode in {"", "eval", "function", "ctrl_x"}
                    ):
                        change = ChangeEvent(range=range(lo, hi), lines=lines)
                        await comp_func(
                            stack=stack, s=s, change=change, t0=t0, manual=False
                        )
        _CELL.val = create_task(cont())
154,836 | from asyncio import Task, create_task, wait
from dataclasses import dataclass
from functools import lru_cache
from html import unescape
from itertools import chain
from math import ceil
from os import linesep
from typing import (
Any,
Awaitable,
Callable,
Iterator,
Mapping,
Optional,
Sequence,
Tuple,
Union,
)
from uuid import UUID, uuid4
from pynvim_pp.buffer import Buffer, ExtMark, ExtMarker
from pynvim_pp.float_win import border_w_h, list_floatwins
from pynvim_pp.lib import display_width
from pynvim_pp.logging import suppress_and_log
from pynvim_pp.nvim import Nvim
from pynvim_pp.preview import buf_set_preview, set_preview
from pynvim_pp.window import Window
from std2 import anext, clamp
from std2.asyncio import cancel
from std2.cell import RefCell
from std2.pickle.decoder import new_decoder
from std2.pickle.types import DecodeError
from std2.string import removeprefix
from ...lsp.requests.resolve import resolve
from ...paths.show import show
from ...registry import NAMESPACE, autocmd, rpc
from ...shared.settings import GhostText, PreviewDisplay
from ...shared.timeit import timeit
from ...shared.trans import expand_tabs
from ...shared.types import Completion, Context, Doc, Edit, ExternLSP, ExternPath
from ..rt_types import Stack
from ..state import State, state
async def _kill_win(stack: Stack, reset: bool) -> None:
    """Tear down preview UI: clear ghost-text extmarks and close float wins."""
    if reset:
        state(pum_location=None, preview_id=uuid4())
    current = await Buffer.get_current()
    namespace = await Nvim.create_namespace(_NS)
    await current.clear_namespace(namespace)
    async for float_win in list_floatwins(_FLOAT_WIN_UUID):
        await float_win.close()
async def _show_preview(stack: Stack, event: _Event, doc: Doc, s: State) -> None:
    # Render *doc* in a floating window positioned around the pum.
    if stack.settings.display.preview.enabled:
        new_doc = _preprocess(s.context, doc=doc)
        text = expand_tabs(s.context, text=new_doc.text)
        lines = text.splitlines()
        # Candidate (index, rank, position) placements around the pum.
        pit = _positions(
            stack.settings.display.preview, event=event, lines=lines, state=s
        )
        def key(k: Tuple[int, int, _Pos]) -> Tuple[int, int, int, int]:
            # Prefer: largest area, then sticking with the previous location,
            # then lower rank / lower index as tie-breakers.
            idx, rank, pos = k
            return pos.height * pos.width, idx == s.pum_location, -rank, -idx
        if ordered := sorted(pit, key=key, reverse=True):
            (pum_location, _, pos), *__ = ordered
            state(pum_location=pum_location)
            buf = await Buffer.create(
                listed=False, scratch=True, wipe=True, nofile=True, noswap=True
            )
            await buf_set_preview(buf=buf, syntax=new_doc.syntax, preview=lines)
            await _set_win(display=stack.settings.display.preview, buf=buf, pos=pos)
async def _resolve_comp(
    stack: Stack,
    event: _Event,
    extern: Union[ExternLSP, ExternPath],
    maybe_doc: Optional[Doc],
    state: State,
) -> None:
    # Resolve full documentation for the selected pum item (LSP resolve or
    # filesystem preview), caching in the LRU, then display it.  The task
    # stored in _CELL is cancelled by the next invocation.
    prev = _CELL.val
    # Only bound the LSP round trip when a fallback doc already exists.
    timeout = stack.settings.display.preview.resolve_timeout if maybe_doc else None
    async def cont() -> None:
        if prev:
            await cancel(prev)  # supersede the previous resolve
        with suppress_and_log():
            if cached := stack.lru.get(state.preview_id):
                doc = cached.doc
            else:
                if isinstance(extern, ExternLSP):
                    done, _ = await wait(
                        (create_task(resolve(extern=extern)),),
                        timeout=timeout,
                    )
                    if comp := (await done.pop()) if done else None:
                        stack.lru[state.preview_id] = comp
                    # Fall back to the unresolved doc on timeout/empty result.
                    doc = (comp.doc if comp else None) or maybe_doc
                elif isinstance(extern, ExternPath):
                    if doc := await show(
                        cwd=state.cwd,
                        path=extern.path,
                        ellipsis=stack.settings.display.pum.ellipsis,
                        height=stack.settings.clients.paths.preview_lines,
                    ):
                        # Wrap the path preview in a dummy Completion so the
                        # LRU cache stays homogeneous.
                        stack.lru[state.preview_id] = Completion(
                            source="",
                            always_on_top=False,
                            weight_adjust=0,
                            label="",
                            sort_by="",
                            primary_edit=Edit(new_text=""),
                            adjust_indent=False,
                            doc=doc,
                            icon_match=None,
                        )
                else:
                    assert False  # exhaustiveness: extern is LSP or Path
            if doc:
                await _show_preview(
                    stack=stack,
                    event=event,
                    doc=doc,
                    s=state,
                )
    _CELL.val = create_task(cont())
async def _virt_text(ghost: GhostText, text: str) -> None:
    # Overlay "ghost text" (the pending insertion) at the cursor via extmark.
    if ghost.enabled:
        lhs, rhs = ghost.context  # decorations wrapped around the text
        overlay, *_ = text.splitlines() or ("",)  # first line only
        virt_text = lhs + overlay + rhs
        ns = await Nvim.create_namespace(_NS)
        win = await Window.get_current()
        buf = await win.get_buf()
        row, col = await win.get_cursor()
        mark = ExtMark(
            buf=buf,
            marker=ExtMarker(1),  # single well-known marker id, replaced each time
            begin=(row, col),
            end=(row, col),
            meta={
                "virt_text_pos": "overlay",
                "hl_mode": "combine",
                "virt_text": ((virt_text, ghost.highlight_group),),
            },
        )
        # Clear any previous ghost text before drawing the new one.
        await buf.clear_namespace(ns)
        await buf.set_extmarks(ns, extmarks=(mark,))
_DECODER = new_decoder[_Event](_Event)
_UDECODER = new_decoder[UUID](UUID)
def timeit(
    name: str, *args: Any, force: bool = False, warn: Optional[float] = None
) -> Iterator[None]:
    # Generator-style context manager that times the wrapped section and logs
    # it, keeping a running per-label average in _RECORDS.
    # NOTE(review): no @contextmanager decorator visible in this chunk --
    # presumably applied at the real definition site; confirm.
    if DEBUG or force or warn is not None:
        with _timeit() as t:
            yield None
        delta = t().total_seconds()
        # Log when debugging, when forced, or when slower than `warn` seconds.
        if DEBUG or force or delta >= (warn or 0):
            times, cum = _RECORDS.get(name, (0, 0))  # (count, cumulative secs)
            tt, c = times + 1, cum + delta
            _RECORDS[name] = tt, c
            label = name.ljust(50)
            time = f"{si_prefixed_smol(delta, precision=0)}s".ljust(8)
            ttime = f"{si_prefixed_smol(c / tt, precision=0)}s".ljust(8)  # running mean
            msg = f"TIME -- {label} :: {time} @ {ttime} {' '.join(map(str, args))}"
            if force:
                log.info("%s", msg)
            else:
                log.debug("%s", msg)
    else:
        # Timing disabled: still behave as a context manager.
        yield None
class Stack:
    """Aggregate of the plugin's long-lived runtime objects.

    NOTE(review): no @dataclass decorator is visible in this chunk --
    presumably applied at the real declaration site; confirm.
    """
    settings: Settings  # merged user/default configuration
    lru: MutableMapping[UUID, Completion]  # cache of resolved completions
    metrics: MutableMapping[UUID, Metric]  # per-completion scoring data
    idb: IDB  # insertions database
    supervisor: Supervisor  # orchestrates the completion workers
    workers: AbstractSet[Worker]  # active completion sources
def state(
    cwd: Optional[PurePath] = None,
    pum_width: Optional[int] = None,
    screen: Optional[Tuple[int, int]] = None,
    change_id: Optional[UUID] = None,
    commit_id: Optional[UUID] = None,
    preview_id: Optional[UUID] = None,
    nono_bufs: AbstractSet[int] = frozenset(),
    context: Optional[Context] = None,
    last_edit: Optional[Metric] = None,
    text_trans: Optional[TextTransforms] = None,
    inserted_pos: Optional[NvimPos] = None,
    pum_location: Union[VoidType, Optional[int]] = Void,
) -> State:
    """Read-modify-write the global state cell; returns the new State.

    Arguments left at their defaults carry the previous value forward.
    ``nono_bufs`` accumulates via set-union instead of replacing, and
    ``pum_location`` uses the ``Void`` sentinel so an explicit ``None``
    can still be stored.
    """
    old_state = _CELL.val  # NOTE(review): assumes the cell was seeded before first call
    new_state = State(
        cwd=cwd or old_state.cwd,
        pum_width=pum_width or old_state.pum_width,
        screen=screen or old_state.screen,
        change_id=change_id or old_state.change_id,
        commit_id=commit_id or old_state.commit_id,
        preview_id=preview_id or old_state.preview_id,
        nono_bufs=old_state.nono_bufs | nono_bufs,  # union: blacklist only grows
        context=context or old_state.context,
        last_edit=last_edit or old_state.last_edit,
        text_trans=text_trans if text_trans is not None else old_state.text_trans,
        inserted_pos=inserted_pos or old_state.inserted_pos,
        pum_location=(
            pum_location
            if not isinstance(pum_location, VoidType)
            else old_state.pum_location
        ),
    )
    _CELL.val = new_state
    return new_state
async def _cmp_changed(stack: Stack, event: Mapping[str, Any] = {}) -> None:
    # CompleteChanged handler: close the previous preview, then show ghost
    # text and documentation for the newly selected pum item.
    # NOTE(review): mutable default {} is shared across calls -- harmless
    # here only because the dict is never mutated; confirm.
    await _kill_win(stack=stack, reset=False)
    with timeit("PREVIEW"):
        try:
            ev = _DECODER(event)
            user_data = ev.completed_item.get("user_data", "")
            uid = _UDECODER(user_data)
        except DecodeError:
            pass  # not one of our items (no UUID in user_data)
        else:
            if metric := stack.metrics.get(uid):
                await _virt_text(
                    ghost=stack.settings.display.ghost_text,
                    text=metric.comp.primary_edit.new_text,
                )
                s = state(preview_id=uid)
                if metric.comp.extern:
                    # Doc may need resolving (LSP / filesystem).
                    await _resolve_comp(
                        stack=stack,
                        event=ev,
                        extern=metric.comp.extern,
                        maybe_doc=metric.comp.doc,
                        state=s,
                    )
                elif metric.comp.doc and metric.comp.doc.text:
                    await _show_preview(
                        stack=stack,
                        event=ev,
                        doc=metric.comp.doc,
                        s=s,
                    )
154,837 | from asyncio import gather
from asyncio.tasks import as_completed
from contextlib import suppress
from dataclasses import dataclass
from itertools import chain
from json import JSONDecodeError, dumps, loads
from math import inf
from os.path import expanduser, expandvars
from pathlib import Path, PurePath
from posixpath import normcase
from string import Template
from tempfile import NamedTemporaryFile
from textwrap import dedent
from typing import (
AbstractSet,
Any,
AsyncIterator,
Iterable,
Iterator,
Mapping,
Optional,
Sequence,
Tuple,
)
from pynvim_pp.lib import decode
from pynvim_pp.logging import log
from pynvim_pp.nvim import Nvim
from pynvim_pp.preview import set_preview
from std2.asyncio import to_thread
from std2.graphlib import recur_sort
from std2.pathlib import walk
from std2.pickle.decoder import new_decoder
from std2.pickle.encoder import new_encoder
from std2.pickle.types import DecodeError
from ...clients.snippet.worker import Worker as SnipWorker
from ...lang import LANG
from ...paths.show import fmt_path
from ...registry import NAMESPACE, atomic, rpc
from ...shared.context import EMPTY_CONTEXT
from ...shared.settings import CompleteOptions, MatchOptions, SnippetWarnings
from ...shared.timeit import timeit
from ...shared.types import (
UTF8,
Edit,
Mark,
SnippetEdit,
SnippetGrammar,
TextTransforms,
)
from ...snippets.loaders.load import load_direct
from ...snippets.loaders.neosnippet import load_neosnippet
from ...snippets.parse import parse_basic
from ...snippets.parsers.types import ParseError, ParseInfo
from ...snippets.types import SCHEMA, LoadedSnips, LoadError, ParsedSnippet
from ..rt_types import Stack
async def slurp_compiled(
    stack: Stack, warn: AbstractSet[SnippetWarnings], silent: bool
) -> Mapping[Path, float]:
    """Load compiled snippets into the snippet worker's database.

    Returns the user snippet sources (path -> mtime) that still need
    compilation; returns {} when no snippet worker is registered.
    """
    for worker in stack.workers:
        if isinstance(worker, SnipWorker):
            break
    else:
        # No snippet worker configured: nothing to do.
        return {}
    with timeit("LOAD SNIPS"):
        (
            cwd,
            bundled,
            (user_compiled, user_compiled_mtimes),
            (_, user_snips_mtimes),
            db_mtimes,
        ) = await gather(
            Nvim.getcwd(),
            _bundled_mtimes(),
            _load_user_compiled(stack.supervisor.vars_dir),
            user_mtimes(user_path=stack.settings.clients.snippets.user_path),
            worker.db_mtimes(),
        )
        # DB entries whose source file no longer exists -> evict.
        if stale := db_mtimes.keys() - (bundled.keys() | user_compiled.keys()):
            await worker.clean(stale)
        # Files newer than their DB entry -> (re)load them.
        if needs_loading := {
            path: mtime
            for path, mtime in chain(bundled.items(), user_compiled.items())
            if mtime > db_mtimes.get(path, -inf)
        }:
            await _rolling_load(worker, cwd=cwd, compiled=needs_loading, silent=silent)
        # User sources newer than their compiled artifacts -> recompile.
        needs_compilation = {
            path: mtime
            for path, mtime in user_snips_mtimes.items()
            if mtime > user_compiled_mtimes.get(path, -inf)
        }
        if SnippetWarnings.missing in warn and not (bundled or user_compiled):
            await Nvim.write(LANG("fs snip load empty"))
        return needs_compilation
async def compile_user_snippets(stack: Stack) -> None:
    """Compile the user's neosnippet sources and persist the result to disk."""
    with timeit("COMPILE SNIPS"):
        info = ParseInfo(visual="", clipboard="", comment_str=("", ""))
        _, mtimes = await user_mtimes(
            user_path=stack.settings.clients.snippets.user_path
        )
        # Parsing is CPU-bound: do it off the event loop.
        loaded = await to_thread(
            lambda: load_direct(
                lambda x: x,
                ignore_error=False,
                lsp=(),
                neosnippet=mtimes,
                ultisnip=(),
                neosnippet_grammar=SnippetGrammar.lsp,
            )
        )
        # Transform every snippet now purely to surface parse errors early;
        # the transformed result itself is discarded.
        _ = tuple(
            _trans(
                stack.settings.match,
                comp=stack.settings.completion,
                info=info,
                snips=loaded.snippets.values(),
            )
        )
        try:
            await _dump_compiled(
                stack.supervisor.vars_dir, mtimes=mtimes, loaded=loaded
            )
        except OSError as e:
            # Disk failure is non-fatal: report and keep the in-memory result.
            await Nvim.write(e)
LANG = _Lang({})
class ParseError(Exception): ...  # raised when a snippet fails to parse
class LoadError(Exception): ...  # raised when snippet files cannot be loaded
class Stack:
    """Aggregate of the plugin's long-lived runtime objects.

    NOTE(review): no @dataclass decorator is visible in this chunk --
    presumably applied at the real declaration site; confirm.
    """
    settings: Settings  # merged user/default configuration
    lru: MutableMapping[UUID, Completion]  # cache of resolved completions
    metrics: MutableMapping[UUID, Metric]  # per-completion scoring data
    idb: IDB  # insertions database
    supervisor: Supervisor  # orchestrates the completion workers
    workers: AbstractSet[Worker]  # active completion sources
async def _load_snips(stack: Stack) -> None:
    # Start-up snippet bootstrap: load compiled snippets, recompile any stale
    # user sources, then load again.  Parse failures are shown in the preview.
    for worker in stack.workers:
        if isinstance(worker, SnipWorker):
            try:
                needs_compilation = await slurp_compiled(
                    stack=stack,
                    warn=stack.settings.clients.snippets.warn,
                    silent=False,
                )
                if needs_compilation:
                    await compile_user_snippets(stack)
                    # Second pass without warnings: pick up fresh artifacts.
                    await slurp_compiled(stack, warn=frozenset(), silent=False)
            except (LoadError, ParseError) as e:
                preview = str(e).splitlines()
                await set_preview(syntax="", preview=preview)
                await Nvim.write(LANG("snip parse fail"))
            break
154,838 | from asyncio import get_running_loop, run_coroutine_threadsafe, wrap_future
from collections import Counter
from dataclasses import dataclass
from itertools import chain
from typing import Mapping
from uuid import UUID, uuid4
from pynvim_pp.lib import display_width
from ..databases.insertions.database import IDB
from ..shared.context import cword_before
from ..shared.fuzzy import MatchMetrics, metrics
from ..shared.parse import coalesce, lower
from ..shared.runtime import Metric, PReviewer
from ..shared.settings import BaseClient, Icons, MatchOptions, Weights
from ..shared.types import Completion, Context
from .icons import iconify
class ReviewCtx:
    """Shared inputs for scoring one batch of completions."""
    batch: UUID  # id of this collection batch
    context: Context  # editor context at collection time
    proximity: Mapping[str, int]  # word -> count; used as the proximity weight
    inserted: Mapping[str, int]  # word -> count; used as the recency weight
    is_lower: bool  # presumably: query is all lower-case (smart-case) -- confirm
def cword_before(
    unifying_chars: AbstractSet[str], lower: bool, context: Context, sort_by: str
) -> str:
    """Pick the cursor-prefix text matching the character class of sort_by's head."""
    head = sort_by[:1]
    if head.isspace():
        return context.ws_before
    if is_word(unifying_chars, chr=head):
        return context.l_words_before if lower else context.words_before
    return context.l_syms_before if lower else context.syms_before
class MatchMetrics:
    """Raw fuzzy-match features between the query and a candidate."""
    prefix_matches: int  # length of the shared prefix
    edit_distance: float  # 1 - normalized edit distance; higher is closer
def metrics(lhs: str, rhs: str, look_ahead: int) -> MatchMetrics:
    """
    Front end bias
    """
    shortest = min(len(lhs), len(rhs))
    if not shortest:
        # One side is empty: nothing to match against.
        return MatchMetrics(prefix_matches=0, edit_distance=0)
    prefix = _p_matches(lhs, rhs)
    cutoff = min(max(len(lhs), len(rhs)), shortest + look_ahead)
    overshoot = cutoff - shortest
    raw_dist = dl_distance(lhs[prefix:cutoff], rhs[prefix:cutoff])
    similarity = 1 - (raw_dist - overshoot) / shortest
    return MatchMetrics(prefix_matches=prefix, edit_distance=similarity)
def lower(text: str) -> str:
    """Aggressively lower-case *text* via Unicode casefolding."""
    folded = text.casefold()
    return folded
class MatchOptions:
    """User-tunable fuzzy-matching parameters."""
    unifying_chars: AbstractSet[str]  # chars treated as part of a word
    max_results: int  # cap on returned completions
    look_ahead: int  # extra chars considered past the shorter string
    exact_matches: int  # NOTE(review): semantics not visible in this chunk
    fuzzy_cutoff: float  # minimum similarity to keep a candidate -- confirm
class Completion:
    """One completion candidate as produced by a source.

    NOTE(review): uses dataclasses.field but no @dataclass decorator is
    visible in this chunk -- presumably applied at the declaration site.
    """
    source: str  # short name of the producing source
    always_on_top: bool  # pin above other results
    weight_adjust: float  # per-source rank bias
    label: str  # text shown in the pum
    sort_by: str  # key used for fuzzy matching/ordering
    primary_edit: Edit  # main text edit applied on accept
    adjust_indent: bool  # reindent inserted text
    icon_match: Optional[str]  # key for icon lookup
    uid: UUID = field(default_factory=uuid4)  # unique per candidate
    secondary_edits: Sequence[RangeEdit] = ()  # extra edits (e.g. auto-imports)
    preselect: bool = False  # pre-highlight in the pum
    kind: str = ""  # LSP-style kind label
    doc: Optional[Doc] = None  # preview documentation
    extern: Union[ExternLSP, ExternLUA, ExternPath, None] = None  # resolver handle
def _metric(
    options: MatchOptions,
    ctx: ReviewCtx,
    completion: Completion,
) -> MatchMetrics:
    """Score one completion's sort key against the word before the cursor."""
    sort_key = lower(completion.sort_by) if ctx.is_lower else completion.sort_by
    before = cword_before(
        options.unifying_chars,
        lower=ctx.is_lower,
        context=ctx.context,
        sort_by=sort_key,
    )
    return metrics(before, sort_key, look_ahead=options.look_ahead)
154,839 | from asyncio import get_running_loop, run_coroutine_threadsafe, wrap_future
from collections import Counter
from dataclasses import dataclass
from itertools import chain
from typing import Mapping
from uuid import UUID, uuid4
from pynvim_pp.lib import display_width
from ..databases.insertions.database import IDB
from ..shared.context import cword_before
from ..shared.fuzzy import MatchMetrics, metrics
from ..shared.parse import coalesce, lower
from ..shared.runtime import Metric, PReviewer
from ..shared.settings import BaseClient, Icons, MatchOptions, Weights
from ..shared.types import Completion, Context
from .icons import iconify
class ReviewCtx:
    """Shared inputs for scoring one batch of completions."""
    batch: UUID  # id of this collection batch
    context: Context  # editor context at collection time
    proximity: Mapping[str, int]  # word -> count; used as the proximity weight
    inserted: Mapping[str, int]  # word -> count; used as the recency weight
    is_lower: bool  # presumably: query is all lower-case (smart-case) -- confirm
def sigmoid(x: float) -> float:
    """
    x -> y ∈ (0.5, 1.5)
    """
    squashed = x / (1 + abs(x))  # softsign: maps R into (-1, 1)
    return squashed / 2 + 1
class MatchMetrics:
    """Raw fuzzy-match features between the query and a candidate."""
    prefix_matches: int  # length of the shared prefix
    edit_distance: float  # 1 - normalized edit distance; higher is closer
class Metric:
    """A scored completion: the candidate plus ranking features and widths."""
    instance: UUID  # id tying this metric to its collection instance
    comp: Completion  # the underlying candidate
    weight_adjust: float  # sigmoid-squashed per-source bias
    weight: Weights  # raw ranking features
    label_width: int  # display cells occupied by the label
    kind_width: int  # UTF-8 length of the kind (icon-safe)
class Weights:
    """Per-candidate ranking features combined into the final score."""
    prefix_matches: float  # shared-prefix length with the query
    edit_distance: float  # fuzzy similarity (higher is closer)
    recency: float  # how recently the word was inserted
    proximity: float  # how near the word occurs to the cursor
class Completion:
    """One completion candidate as produced by a source.

    NOTE(review): uses dataclasses.field but no @dataclass decorator is
    visible in this chunk -- presumably applied at the declaration site.
    """
    source: str  # short name of the producing source
    always_on_top: bool  # pin above other results
    weight_adjust: float  # per-source rank bias
    label: str  # text shown in the pum
    sort_by: str  # key used for fuzzy matching/ordering
    primary_edit: Edit  # main text edit applied on accept
    adjust_indent: bool  # reindent inserted text
    icon_match: Optional[str]  # key for icon lookup
    uid: UUID = field(default_factory=uuid4)  # unique per candidate
    secondary_edits: Sequence[RangeEdit] = ()  # extra edits (e.g. auto-imports)
    preselect: bool = False  # pre-highlight in the pum
    kind: str = ""  # LSP-style kind label
    doc: Optional[Doc] = None  # preview documentation
    extern: Union[ExternLSP, ExternLUA, ExternPath, None] = None  # resolver handle
def _join(
    ctx: ReviewCtx,
    instance: UUID,
    completion: Completion,
    match_metrics: MatchMetrics,
) -> Metric:
    """Combine fuzzy-match stats with context-derived weights into a Metric."""
    sort_key = completion.sort_by
    weights = Weights(
        prefix_matches=match_metrics.prefix_matches,
        edit_distance=match_metrics.edit_distance,
        recency=ctx.inserted.get(sort_key, 0),
        proximity=ctx.proximity.get(sort_key, 0),
    )

    # !! WARN
    # Use UTF8 len for icon support
    # !! WARN
    kind_cells = len(completion.kind)
    label_cells = display_width(completion.label, tabsize=ctx.context.tabstop)

    return Metric(
        instance=instance,
        comp=completion,
        weight_adjust=sigmoid(completion.weight_adjust),
        weight=weights,
        label_width=label_cells,
        kind_width=kind_cells,
    )
from revChatGPT.revChatGPT import Chatbot
def login(config):
    """
    Ensure a usable OpenAI credential and return a revChatGPT config dict.

    If the cached access token is still valid it is reused directly.
    Otherwise the user may log in with email/password ('y') to mint a new
    token, or decline ('n') to keep the default session_token already in
    `config`. On any login failure the original `config` is returned
    unchanged.
    """
    expired_creds = OpenAI.token_expired()
    if not expired_creds:
        # Cached token still fresh: reuse it.
        access_token = OpenAI.get_access_token()
        return {"Authorization": access_token[0]}

    print_warp("access_token过期,请选择登陆(y)或者使用默认的session_token(n)。请输入(y/n):")
    if input() != 'y':
        # User declined (or typed anything else): keep the session_token config.
        print("\n\n\n")
        return config

    email = input("邮箱:")
    pwd = input("密码:")
    open_ai_auth = OpenAI.Auth(email_address=email, password=pwd)
    try:
        open_ai_auth.create_token()
    except Exception:
        # Was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit
        # still propagate instead of being swallowed as a failed login.
        print_warp("登陆失败!请检查邮箱和密码。将使用默认session_token进入。")
        print("\n\n\n")
        return config

    # Login succeeded: fetch and use the newly minted access token.
    access_token = OpenAI.get_access_token()
    print("\n\n\n")
    return {"Authorization": access_token[0]}
154,841 | from colorama import Fore, Back, Style
import textwrap
def print_warp(instr):
    """Print `instr` wrapped at 50 columns, each line in bright style."""
    wrapped = textwrap.wrap(instr, width=50)
    for line in wrapped:
        print(Style.BRIGHT + line)
154,842 | from colorama import Fore, Back, Style
import textwrap
def error(msg):
    """Print `msg` in bold red, then reset terminal styling."""
    styled = Style.BRIGHT + Fore.RED + msg
    print(styled)
    print(Style.RESET_ALL)
154,843 | from colorama import Fore, Back, Style
import textwrap
def input_option(prompt, true_option, false_option, default_option):
    """
    Ask a yes/no style question on stdin and return True/False.

    Any answer other than `true_option` or `false_option` falls back to
    whether `default_option` equals `true_option`.
    """
    answer = input(prompt + f"({true_option}/{false_option}, default {default_option})")
    if answer == true_option:
        return True
    if answer == false_option:
        return False
    return default_option == true_option
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.