This article collects typical usage examples of typing.BinaryIO in Python. If you are wondering what typing.BinaryIO is for, or how to use it in your own code, the curated examples below should help. You can also browse further usage examples from the typing module, in which BinaryIO is defined.
The following presents 15 code examples of typing.BinaryIO, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps surface better Python code examples.
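For readers new to the type itself: typing.BinaryIO describes binary streams, i.e. objects such as the handles returned by open(..., 'rb'), io.BytesIO buffers, or socket file objects. The examples below are real-world usages; as a quick orientation, here is a minimal, self-contained sketch (not taken from any of the projects below) of annotating a parameter with BinaryIO:

import io
from typing import BinaryIO

def read_magic(stream: BinaryIO) -> bytes:
    # Anything with a binary read() fits: open(..., 'rb'), io.BytesIO, sock.makefile('rb'), ...
    return stream.read(4)

print(read_magic(io.BytesIO(b"\x89PNG\r\n\x1a\n")))  # b'\x89PNG'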
Example 1: __init__
# Required import: import typing [as alias]
# Or: from typing import BinaryIO [as alias]
def __init__(
    self,
    stream: BinaryIO = None,
    filename: str = None,
    name: str = None,
    content_type: str = None,
    headers: Dict = None,
) -> None:
    self.name = name
    self.stream = stream or io.BytesIO()
    self.filename = filename
    if headers is None:
        headers = {}
    self.headers = headers
    if content_type is not None:
        headers["Content-Type"] = content_type
Example 2: save
# Required import: import typing [as alias]
# Or: from typing import BinaryIO [as alias]
def save(self, destination: BinaryIO, buffer_size: int = 16384) -> None:
    """Save the file to the destination.

    Arguments:
        destination: A filename (str) or file object to write to.
        buffer_size: Buffer size, used as the length argument to
            :func:`shutil.copyfileobj`.
    """
    close_destination = False
    if isinstance(destination, str):
        destination = open(destination, "wb")
        close_destination = True
    try:
        copyfileobj(self.stream, destination, buffer_size)  # type: ignore
    finally:
        if close_destination:
            destination.close()
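A short usage sketch of the str-or-BinaryIO destination pattern shown in Examples 1 and 2. Since the snippets do not show their enclosing class, a minimal hypothetical FileStorage stand-in is used here:

import io
from shutil import copyfileobj
from typing import BinaryIO


class FileStorage:
    # Hypothetical stand-in: only the pieces needed to demonstrate save() are reproduced.
    def __init__(self, stream: BinaryIO = None) -> None:
        self.stream = stream or io.BytesIO()

    def save(self, destination, buffer_size: int = 16384) -> None:
        close_destination = False
        if isinstance(destination, str):
            destination = open(destination, "wb")
            close_destination = True
        try:
            copyfileobj(self.stream, destination, buffer_size)
        finally:
            if close_destination:
                destination.close()


storage = FileStorage(io.BytesIO(b"payload"))
buffer = io.BytesIO()
storage.save(buffer)                  # BinaryIO destination: the caller keeps ownership
print(buffer.getvalue())              # b'payload'
# storage.save("payload.bin")         # str destination: opened and closed by save() itself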
Example 3: _send
# Required import: import typing [as alias]
# Or: from typing import BinaryIO [as alias]
def _send(self, message: Union[bytes, str], handler: Callable[[Union[socket.socket, ssl.SSLSocket], BinaryIO, Union[bytes, str]], None]) -> None:
    """
    Send a message in a thread-safe manner. The handler is expected to be of the form...

    ::

        my_handler(socket, socket_file, message)
    """
    with self._send_lock:
        try:
            if not self.is_alive():
                raise stem.SocketClosed()

            handler(self._socket, self._socket_file, message)
        except stem.SocketClosed:
            # If send_message raises a SocketClosed then we should properly shut
            # everything down.
            if self.is_alive():
                self.close()

            raise
Example 4: _write_to_socket
# Required import: import typing [as alias]
# Or: from typing import BinaryIO [as alias]
def _write_to_socket(socket_file: BinaryIO, message: Union[str, bytes]) -> None:
    try:
        socket_file.write(stem.util.str_tools._to_bytes(message))
        socket_file.flush()
    except socket.error as exc:
        log.info('Failed to send: %s' % exc)

        # When sending, there doesn't seem to be a reliable method for
        # distinguishing failures caused by a disconnect from other errors,
        # so we only account for known disconnection responses.
        if str(exc) == '[Errno 32] Broken pipe':
            raise stem.SocketClosed(exc)
        else:
            raise stem.SocketError(exc)
    except AttributeError:
        # If the control_file has been closed then flush will raise:
        # AttributeError: 'NoneType' object has no attribute 'sendall'
        log.info('Failed to send: file has been closed')
        raise stem.SocketClosed('file has been closed')
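The socket_file passed to helpers like this is just a binary file object wrapped around a socket; socket.makefile() is the usual way to obtain one. A runnable sketch using a local socketpair (this only illustrates the BinaryIO shape, not stem's actual control-socket setup):

import socket

# Two connected sockets, entirely local.
a, b = socket.socketpair()
writer = a.makefile("wb")   # a BinaryIO suitable as the socket_file argument
reader = b.makefile("rb")

writer.write(b"PROTOCOLINFO 1\r\n")
writer.flush()
print(reader.readline())    # b'PROTOCOLINFO 1\r\n'

for obj in (writer, reader, a, b):
    obj.close()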
Example 5: _parse_file
# Required import: import typing [as alias]
# Or: from typing import BinaryIO [as alias]
def _parse_file(descriptor_file: BinaryIO, validate: bool = False, **kwargs: Any) -> Iterator['stem.descriptor.bandwidth_file.BandwidthFile']:
    """
    Iterates over the bandwidth authority metrics in a file.

    :param descriptor_file: file with descriptor content
    :param validate: checks the validity of the descriptor's content if
        **True**, skips these checks otherwise
    :param kwargs: additional arguments for the descriptor constructor

    :returns: :class:`stem.descriptor.bandwidth_file.BandwidthFile` object

    :raises:
        * **ValueError** if the contents are malformed and validate is **True**
        * **IOError** if the file can't be read
    """
    if kwargs:
        raise ValueError('BUG: keyword arguments unused by bandwidth files')

    yield BandwidthFile(descriptor_file.read(), validate)
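The descriptor_file argument here is nothing more than a file opened in binary mode. A usage sketch for the parser above (the path is illustrative; in practice stem's public stem.descriptor.parse_file front end is usually used rather than calling _parse_file directly):

# Illustrative path to a bandwidth authority file.
with open('/path/to/bandwidth-file', 'rb') as descriptor_file:
    bandwidth_file = next(_parse_file(descriptor_file))
    print(bandwidth_file)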
Example 6: _parse_file_detached_sigs
# Required import: import typing [as alias]
# Or: from typing import BinaryIO [as alias]
def _parse_file_detached_sigs(detached_signature_file: BinaryIO, validate: bool = False) -> Iterator['stem.descriptor.networkstatus.DetachedSignature']:
    """
    Parses a file containing one or more detached signatures.

    :param detached_signature_file: file with detached signatures
    :param validate: checks the validity of the detached signatures'
        contents if **True**, skips these checks otherwise

    :returns: iterator for :class:`stem.descriptor.networkstatus.DetachedSignature`
        instances in the file

    :raises:
        * **ValueError** if the detached signatures are invalid and validate is **True**
        * **IOError** if the file can't be read
    """
    while True:
        detached_sig_content = _read_until_keywords('consensus-digest', detached_signature_file, ignore_first=True)

        if detached_sig_content:
            yield stem.descriptor.networkstatus.DetachedSignature(bytes.join(b'', detached_sig_content), validate=validate)
        else:
            break  # done parsing file
Example 7: search_by_pcap
# Required import: import typing [as alias]
# Or: from typing import BinaryIO [as alias]
def search_by_pcap(self, pcap_file_obj: typing.BinaryIO) -> requests.Response:
    """
    Search by a pcap/pcapng file and get a list of similar packet captures.

    :param pcap_file_obj: A file-like object that provides a .read() interface
        (e.g. open('path_to_pcap.pcap', 'rb'))
    :return: A requests.Response instance containing a graph of similar pcaps with matched terms
    """
    response = super().analyze(pcap_file_obj)
    if response.status_code == 200:
        sim_response = super().pcap_similar(response.json()['pcap_metadata']['md5'])
    elif response.status_code == 202:
        pcap_id = response.json()['id']
        info_response = super().pcap_info(pcap_id)
        while info_response.status_code == 404:
            print('[{}] Waiting for {} to finish analyzing.'.format(datetime.utcnow(), pcap_id))
            info_response = super().pcap_info(response.json()['id'])
            time.sleep(10)
        print('[{}] Fetching results for {}.'.format(datetime.utcnow(), pcap_id))
        time.sleep(5)
        sim_response = super().pcap_similar(response.json()['id'])
    else:
        return response
    return sim_response
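Because pcap_file_obj only needs a binary .read(), opening the capture with 'rb' is sufficient. A hypothetical usage sketch (the client class name, API key, and file name are invented for illustration):

# Hypothetical client exposing the search_by_pcap() method shown above.
client = PacketSearchClient(api_key='...')

with open('capture.pcap', 'rb') as pcap_file_obj:
    response = client.search_by_pcap(pcap_file_obj)
    print(response.status_code)
    print(response.json())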
Example 8: to_graphml
# Required import: import typing [as alias]
# Or: from typing import BinaryIO [as alias]
def to_graphml(graph: BELGraph, path: Union[str, BinaryIO], schema: Optional[str] = None) -> None:
    """Write a graph to a GraphML XML file using :func:`networkx.write_graphml`.

    :param graph: BEL Graph
    :param path: Path to the new exported file
    :param schema: Type of export. Currently supported: "simple" and "umbrella".

    The .graphml file extension is suggested so Cytoscape can recognize it.
    By default, this function exports using the PyBEL schema, which includes modifier
    information on the edges. As an alternative, the "umbrella" schema can be used instead.
    """
    if schema is None or schema == 'simple':
        rv = _to_graphml_simple(graph)
    elif schema == 'umbrella':
        rv = _to_graphml_umbrella(graph)
    else:
        raise ValueError('Unhandled schema: {}'.format(schema))

    nx.write_graphml(rv, path)
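Since path is typed Union[str, BinaryIO], the function accepts either a filename or an already-open binary handle (networkx.write_graphml handles both). A short sketch, assuming a PyBEL BELGraph is available:

from pybel import BELGraph

graph = BELGraph(name='example', version='0.0.1')

# Write by path...
to_graphml(graph, 'example.graphml')

# ...or into any BinaryIO, such as an open file or an in-memory buffer.
with open('example_umbrella.graphml', 'wb') as file:
    to_graphml(graph, file, schema='umbrella')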
Example 9: read_varint_from_file
# Required import: import typing [as alias]
# Or: from typing import BinaryIO [as alias]
def read_varint_from_file(fptr: typing.BinaryIO) -> int:
    buffer = fptr.read(1)
    if buffer[0] < 0xfd:
        value_size = 1
        value = buffer[0]
    elif buffer[0] == 0xfd:
        value_size = 2
        buffer = fptr.read(value_size)
        value = int.from_bytes(buffer[0:2], byteorder='little')
    elif buffer[0] == 0xfe:
        value_size = 4
        buffer = fptr.read(value_size)
        value = int.from_bytes(buffer[0:4], byteorder='little')
    elif buffer[0] == 0xff:
        value_size = 8
        buffer = fptr.read(value_size)
        value = int.from_bytes(buffer[0:8], byteorder='little')
    else:
        raise Exception("Invalid varint size")

    if value_size != len(buffer):
        raise ValueError('File end before read completed.')

    return value
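Because the reader only relies on .read(), an in-memory io.BytesIO can stand in for a real file, which makes it easy to check against the 0xfd/0xfe/0xff markers used above (Bitcoin-style varint encoding):

import io

print(read_varint_from_file(io.BytesIO(b'\x2a')))                  # 42: single-byte value
print(read_varint_from_file(io.BytesIO(b'\xfd\x34\x12')))          # 4660: 0xfd marker + 2 bytes little-endian
print(read_varint_from_file(io.BytesIO(b'\xfe\x78\x56\x34\x12')))  # 305419896: 0xfe marker + 4 bytes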
Example 10: from_reader
# Required import: import typing [as alias]
# Or: from typing import BinaryIO [as alias]
def from_reader(cls, r: typing.BinaryIO):
    o = Expression()
    d = 1
    while True:
        i = Instruction.from_reader(r)
        if not i:
            break
        o.data.append(i)
        if i.opcode in [instruction.block, instruction.loop, instruction.if_]:
            d += 1
        if i.opcode == instruction.end:
            d -= 1
            if d == 0:
                break
    if o.data[-1].opcode != instruction.end:
        raise Exception('pywasm: expression did not end with 0xb')
    o.position = cls.mark(o.data)
    return o
Example 11: download_file
# Required import: import typing [as alias]
# Or: from typing import BinaryIO [as alias]
def download_file(source: str, output: Union[str, BinaryIO], log: logging.Logger,
                  chunk_size: int = -1) -> None:
    """
    Download a file by its URL.

    :param source: URL to fetch.
    :param output: File name or file object to write to.
    :param log: Logger to use.
    :param chunk_size: Buffer size, if the underlying downloader supports setting it.
    :return: None
    """
    __downloaders__[source[:source.find("://")]](source, output, log, chunk_size)
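The lookup source[:source.find("://")] keys a registry of downloader callables by URL scheme. A self-contained sketch of the same dispatch pattern (the registry contents and downloader here are invented for illustration):

import logging
from typing import BinaryIO, Callable, Dict, Union

def _http_download(source: str, output: Union[str, BinaryIO],
                   log: logging.Logger, chunk_size: int) -> None:
    log.info("would fetch %s over HTTP(S)", source)

# Hypothetical registry mapping URL scheme -> downloader.
__downloaders__: Dict[str, Callable] = {
    "http": _http_download,
    "https": _http_download,
}

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("downloader")
source = "https://example.com/model.asdf"
scheme = source[:source.find("://")]              # -> "https"
__downloaders__[scheme](source, "model.asdf", log, -1)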
Example 12: save
# Required import: import typing [as alias]
# Or: from typing import BinaryIO [as alias]
def save(self, output: Union[str, BinaryIO], series: Optional[str] = None,
         deps: Iterable = tuple(), create_missing_dirs: bool = True) -> "Model":
    """
    Serialize the model to a file.

    :param output: Path to the file or a file object.
    :param series: Name of the model series. If it is None, it will be taken from \
                   the current value; if the current value is empty, an error is raised.
    :param deps: List of the dependencies.
    :param create_missing_dirs: Create missing directories in the output path if the output is \
                                a path.
    :return: self
    """
    check_license(self.license)
    if series is None:
        if self.series is None:
            raise ValueError("series must be specified")
    else:
        self.series = series
    if isinstance(output, str) and create_missing_dirs:
        dirs = os.path.split(output)[0]
        if dirs:
            os.makedirs(dirs, exist_ok=True)
    self.set_dep(*deps)
    tree = self._generate_tree()
    self._write_tree(tree, output)
    self._initial_version = self.version
    return self
Example 13: download
# Required import: import typing [as alias]
# Or: from typing import BinaryIO [as alias]
def download(source: str, file: Union[str, BinaryIO], log: logging.Logger,
             chunk_size: int = -1) -> None:
    """
    Download a file from an HTTP source.

    :param source: URL to fetch.
    :param file: Where to store the downloaded data.
    :param log: Logger.
    :param chunk_size: Size of the download buffer.
    """
    log.info("Fetching %s...", source)
    if chunk_size < 0:
        chunk_size = DEFAULT_DOWNLOAD_CHUNK_SIZE
    r = requests.get(source, stream=True)
    if r.status_code != 200:
        log.error(
            "An error occurred while fetching the model, with code %s" % r.status_code)
        raise ValueError
    if isinstance(file, str):
        os.makedirs(os.path.dirname(file), exist_ok=True)
        f = open(file, "wb")
    else:
        f = file
    try:
        total_length = int(r.headers.get("content-length"))
        num_chunks = math.ceil(total_length / chunk_size)
        if num_chunks == 1:
            f.write(r.content)
        else:
            for chunk in progress_bar(
                    r.iter_content(chunk_size=chunk_size),
                    log,
                    expected_size=num_chunks):
                if chunk:
                    f.write(chunk)
    finally:
        if isinstance(file, str):
            f.close()
Example 14: __post_init__
# Required import: import typing [as alias]
# Or: from typing import BinaryIO [as alias]
def __post_init__(self):
    # All perf file descriptors, except leaders.
    self._event_files: List[BinaryIO] = []
    # Perf data file descriptors (only leaders), per PMU and per CPU.
    self._group_event_leader_files_per_pmu: Dict[int, Dict[int, BinaryIO]] = defaultdict(
        lambda: defaultdict())
    # Do the magic: enable everything and start counting.
    self._open()
Example 15: __init__
# Required import: import typing [as alias]
# Or: from typing import BinaryIO [as alias]
def __init__(
        self,
        cgroup_path: str,
        event_names: Iterable[MetricName],
        platform: Platform,
        aggregate_for_all_cpus_with_sum: bool = True
):
    # cgroup_path must be provided with a leading '/'.
    assert cgroup_path.startswith('/')
    # cgroup path without the leading '/'
    relative_cgroup_path = cgroup_path[1:]
    self._cgroup_fd: int = _get_cgroup_fd(relative_cgroup_path)

    # All perf file descriptors, except leaders.
    self._event_files: List[BinaryIO] = []
    # Perf data file descriptors (only leaders), per CPU.
    self._group_event_leader_files: Dict[int, BinaryIO] = {}
    self._platform = platform
    self._aggregate_for_all_cpus_with_sum = aggregate_for_all_cpus_with_sum

    # Keep event names for output information.
    self._event_names: List[MetricName] = event_names

    # Do the magic: enable everything and start counting.
    self._open()