# ipfshttpclient/__init__.py
"""Python IPFS HTTP CLIENT library"""

from .version import __version__

###################################
# Import stable HTTP CLIENT parts #
###################################
from . import exceptions

from .client import DEFAULT_ADDR, DEFAULT_BASE
from .client import VERSION_MINIMUM, VERSION_MAXIMUM
from .client import Client, assert_version, connect


# ipfshttpclient/encoding.py
"""Classes for encoding and decoding datastreams into object values"""

import abc
import codecs
import typing as ty

import json

from . import exceptions
from . import utils

if ty.TYPE_CHECKING:
	import typing_extensions as ty_ext
else:
	from . import utils as ty_ext

T = ty.TypeVar("T")


def empty_gen() -> ty.Generator[T, None, None]:
	"""A generator that yields nothing"""
	if False:  # pragma: no branch
		yield ty.cast(T, None)  # type: ignore[unreachable]


class Encoding(ty.Generic[T], metaclass=abc.ABCMeta):
	"""Abstract base for a data parser/encoder interface"""
	#name: str
	is_stream = False  # type: bool
	
	@abc.abstractmethod
	def parse_partial(self, raw: bytes) -> ty.Generator[T, ty.Any, ty.Any]:
		"""Parses the given data and yields all complete data sets that can
		be built from this.
		
		Raises
		------
		~ipfshttpclient.exceptions.DecodingError
		
		Parameters
		----------
		raw
			Data to be parsed
		"""
	
	def parse_finalize(self) -> ty.Generator[T, ty.Any, ty.Any]:
		"""Finalizes parsing based on remaining buffered data and yields the
		remaining data sets
		
		Raises
		------
		~ipfshttpclient.exceptions.DecodingError
		"""
		return empty_gen()
	
	@abc.abstractmethod
	def encode(self, obj: T) -> bytes:
		"""Serializes the given Python object to a bytes string
		
		Raises
		------
		~ipfshttpclient.exceptions.EncodingError
		
		Parameters
		----------
		obj
			Object to be encoded
		"""


class Dummy(Encoding[bytes]):
	"""Dummy parser/encoder that does nothing"""
	name = "none"
	is_stream = True
	
	def parse_partial(self, raw: bytes) -> ty.Generator[bytes, ty.Any, ty.Any]:
		"""Yields the data passed into this method
		
		Parameters
		----------
		raw
			Any kind of data
		"""
		yield raw
	
	def encode(self, obj: bytes) -> bytes:
		"""Returns the bytes representation of the data passed into this
		function
		
		Parameters
		----------
		obj
			Any Python object
		"""
		return obj


class Json(Encoding[utils.json_value_t]):
	"""JSON parser/encoder that handles concatenated JSON"""
	name = 'json'
	
	def __init__(self) -> None:
		self._buffer    = []    # type: ty.List[ty.Optional[str]]
		self._decoder1  = codecs.getincrementaldecoder('utf-8')()
		self._decoder2  = json.JSONDecoder()
		self._lasterror = None  # type: ty.Optional[ValueError]
	
	@ty.no_type_check  # It works just fine and I don't want to rewrite it just
	                   # because mypy doesn't understand…  # noqa: E114, E116
	def parse_partial(self, data: bytes) -> ty.Generator[utils.json_value_t, ty.Any, ty.Any]:
		"""Incrementally decodes JSON data sets into Python objects.
		
		Raises
		------
		~ipfshttpclient.exceptions.DecodingError
		"""
		try:
			# Python requires all JSON data to be text strings
			lines = self._decoder1.decode(data, False).split("\n")
			
			# Add first input line to last buffer line, if applicable, to
			# handle cases where the JSON string has been chopped in half
			# at the network level due to streaming
			if len(self._buffer) > 0 and self._buffer[-1] is not None:
				self._buffer[-1] += lines[0]
				self._buffer.extend(lines[1:])
			else:
				self._buffer.extend(lines)
		except UnicodeDecodeError as error:
			raise exceptions.DecodingError('json', error) from error
		
		# Process data buffer
		index = 0
		try:
			# Process each line as separate buffer
			#PERF: This way the `.lstrip()` call becomes almost always a NOP
			#      even if it does return a different string it will only
			#      have to allocate a new buffer for the currently processed
			#      line.
			while index < len(self._buffer):
				while self._buffer[index]:
					# Make sure buffer does not start with whitespace
					#PERF: `.lstrip()` does not reallocate if the string does
					#      not actually start with whitespace.
					self._buffer[index] = self._buffer[index].lstrip()
					
					# Handle case where the remainder of the line contained
					# only whitespace
					if not self._buffer[index]:
						self._buffer[index] = None
						continue
					
					# Try decoding the partial data buffer and return results
					# from this
					#
					# Use `pragma: no branch` as the final loop iteration will always
					# raise if parsing didn't work out, rather than falling through
					# to the `yield obj` line.
					data = self._buffer[index]
					for index2 in range(index, len(self._buffer)):  # pragma: no branch
						# If decoding doesn't succeed with the currently
						# selected buffer (very unlikely with our current
						# class of input data) then retry with appending
						# any other pending pieces of input data
						# This will happen with JSON data that contains
						# arbitrary new-lines: "{1:\n2,\n3:4}"
						if index2 > index:
							data += "\n" + self._buffer[index2]
						
						try:
							(obj, offset) = self._decoder2.raw_decode(data)
						except ValueError:
							# Treat error as fatal if we have already added
							# the final buffer to the input
							if (index2 + 1) == len(self._buffer):
								raise
						else:
							index = index2
							break
					
					# Decoding succeeded – yield result and shorten buffer
					yield obj
					if offset < len(self._buffer[index]):
						self._buffer[index] = self._buffer[index][offset:]
					else:
						self._buffer[index] = None
				
				index += 1
		except ValueError as error:
			# It is unfortunately not possible to reliably detect whether
			# parsing ended because of an error *within* the JSON string, or
			# an unexpected *end* of the JSON string.
			# We therefore have to assume that any error that occurs here
			# *might* be related to the JSON parser hitting EOF and therefore
			# have to postpone error reporting until `parse_finalize` is
			# called.
			self._lasterror = error
		finally:
			# Remove all processed buffers
			del self._buffer[0:index]
	
	def parse_finalize(self) -> ty.Generator[utils.json_value_t, ty.Any, ty.Any]:
		"""Raises errors for incomplete buffered data that could not be parsed
		because the end of the input data has been reached.
		
		Raises
		------
		~ipfshttpclient.exceptions.DecodingError
		"""
		try:
			try:
				# Raise exception for remaining bytes in bytes decoder
				self._decoder1.decode(b'', True)
			except UnicodeDecodeError as error:
				raise exceptions.DecodingError('json', error) from error
			
			# Late raise errors that looked like they could have been fixed if
			# the caller had provided more data
			if self._buffer and self._lasterror:
				raise exceptions.DecodingError('json', self._lasterror) from self._lasterror
		finally:
			# Reset state
			self._buffer    = []
			self._lasterror = None
			self._decoder1.reset()
		
		return empty_gen()
	
	def encode(self, obj: utils.json_value_t) -> bytes:
		"""Returns ``obj`` serialized as JSON formatted bytes
		
		Raises
		------
		~ipfshttpclient.exceptions.EncodingError
		
		Parameters
		----------
		obj
			JSON serializable Python object
		"""
		try:
			result = json.dumps(obj, sort_keys=True, indent=None,
			                    separators=(',', ':'), ensure_ascii=False)
			return result.encode("utf-8")
		except (UnicodeEncodeError, TypeError) as error:
			raise exceptions.EncodingError('json', error) from error


# encodings supported by the IPFS api (default is JSON)
__encodings = {
	Dummy.name: Dummy,
	Json.name: Json,
}  # type: ty.Dict[str, ty.Type[Encoding[ty.Any]]]


@ty.overload
def get_encoding(name: ty_ext.Literal["none"]) -> Dummy:
	...

@ty.overload  # noqa: E302
def get_encoding(name: ty_ext.Literal["json"]) -> Json:
	...

def get_encoding(name: str) -> Encoding[ty.Any]:  # noqa: E302
	"""Returns an Encoder object for the given encoding name
	
	Raises
	------
	~ipfshttpclient.exceptions.EncoderMissingError
	
	Parameters
	----------
	name
		Encoding name. Supported options:
		
		* ``"none"``
		* ``"json"``
	"""
	try:
		return __encodings[name.lower()]()
	except KeyError:
		raise exceptions.EncoderMissingError(name) from None


# ipfshttpclient/exceptions.py
"""
The class hierarchy for exceptions is::

	Error
	 ├── VersionMismatch
	 ├── AddressError
	 ├── EncoderError
	 │    ├── EncoderMissingError
	 │    ├── EncodingError
	 │    └── DecodingError
	 └── CommunicationError
	      ├── ProtocolError
	      ├── StatusError
	      ├── ErrorResponse
	      │    └── PartialErrorResponse
	      ├── ConnectionError
	      └── TimeoutError

"""

import typing as ty

import multiaddr.exceptions  # type: ignore[import]


class Error(Exception):
	"""Base class for all exceptions in this module."""
	__slots__ = ()


class AddressError(Error, multiaddr.exceptions.Error):  # type: ignore[no-any-unimported, misc]
	"""Raised when the provided daemon location Multiaddr does not match any
	of the supported patterns."""
	__slots__ = ("addr",)
	#addr: ty.Union[str, bytes]
	
	def __init__(self, addr: ty.Union[str, bytes]) -> None:
		self.addr = addr  # type: ty.Union[str, bytes]
		Error.__init__(self, "Unsupported Multiaddr pattern: {0!r}".format(addr))


class VersionMismatch(Error):
	"""Raised when daemon version is not supported by this client version."""
	__slots__ = ("current", "minimum", "maximum")
	#current: ty.Sequence[int]
	#minimum: ty.Sequence[int]
	#maximum: ty.Sequence[int]
	
	def __init__(self, current: ty.Sequence[int], minimum: ty.Sequence[int],
	             maximum: ty.Sequence[int]) -> None:
		self.current = current  # type: ty.Sequence[int]
		self.minimum = minimum  # type: ty.Sequence[int]
		self.maximum = maximum  # type: ty.Sequence[int]
		
		msg = "Unsupported daemon version '{}' (not in range: {} ≤ … < {})".format(
			".".join(map(str, current)),
			".".join(map(str, minimum)),
			".".join(map(str, maximum))
		)
		super().__init__(msg)


###############
# encoding.py #
###############
class EncoderError(Error):
	"""Base class for all encoding and decoding related errors."""
	__slots__ = ("encoder_name",)
	#encoder_name: str
	
	def __init__(self, message: str, encoder_name: str) -> None:
		self.encoder_name = encoder_name  # type: str
		
		super().__init__(message)


class EncoderMissingError(EncoderError):
	"""Raised when a requested encoder class does not actually exist."""
	__slots__ = ()
	
	def __init__(self, encoder_name: str) -> None:
		super().__init__("Unknown encoder: '{}'".format(encoder_name), encoder_name)


class EncodingError(EncoderError):
	"""Raised when encoding a Python object into a byte string has failed
	due to some problem with the input data."""
	__slots__ = ("original",)
	#original: Exception
	
	def __init__(self, encoder_name: str, original: Exception) -> None:
		self.original = original  # type: Exception
		
		super().__init__("Object encoding error: {}".format(original), encoder_name)


class DecodingError(EncoderError):
	"""Raised when decoding a byte string to a Python object has failed due to
	some problem with the input data."""
	__slots__ = ("original",)
	#original: Exception
	
	def __init__(self, encoder_name: str, original: Exception) -> None:
		self.original = original  # type: Exception
		
		super().__init__("Object decoding error: {}".format(original), encoder_name)


###########
# http.py #
###########
class CommunicationError(Error):
	"""Base class for all network communication related errors."""
	__slots__ = ("original",)
	#original: ty.Optional[Exception]
	
	def __init__(self, original: ty.Optional[Exception],
	             _message: ty.Optional[str] = None) -> None:
		self.original = original  # type: ty.Optional[Exception]
		
		msg = ""  # type: str
		if _message:
			msg = _message
		else:
			msg = "{}: {}".format(type(original).__name__, str(original))
		super().__init__(msg)


class ProtocolError(CommunicationError):
	"""Raised when parsing the response from the daemon has failed.
	
	This can most likely occur if the service on the remote end isn't in fact
	an IPFS daemon."""
	__slots__ = ()


class StatusError(CommunicationError):
	"""Raised when the daemon responds with an error to our request."""
	__slots__ = ()


class ErrorResponse(StatusError):
	"""Raised when the daemon has responded with an error message because the
	requested operation could not be carried out."""
	__slots__ = ()
	
	def __init__(self, message: str, original: ty.Optional[Exception]) -> None:
		super().__init__(original, message)


class PartialErrorResponse(ErrorResponse):
	"""Raised when the daemon has responded with an error message after having
	already returned some data."""
	__slots__ = ()
	
	def __init__(self, message: str, original: ty.Optional[Exception] = None) -> None:
		super().__init__(message, original)


class ConnectionError(CommunicationError):
	"""Raised when connecting to the service has failed on the socket layer."""
	__slots__ = ()


class TimeoutError(CommunicationError):
	"""Raised when the daemon didn't respond in time."""
	__slots__ = ()
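The incremental JSON decoder in encoding.py above is easiest to understand with a small usage sketch. The snippet below is illustrative only and not part of the package – the two-chunk split is an arbitrary assumption – but it shows how concatenated JSON that arrives in arbitrary network-sized pieces is fed to the parser, the same way the HTTP response iterator in http_common.py does:

# Example (not part of the library): decode concatenated JSON arriving in chunks.
from ipfshttpclient import encoding

parser = encoding.get_encoding("json")

objects = []
# Hypothetical chunk boundaries – the second object is split mid-string
for chunk in (b'{"Name": "a"}\n{"Na', b'me": "b"}\n'):
	objects.extend(parser.parse_partial(chunk))
# Flush buffered state; raises DecodingError if the input ended mid-object
objects.extend(parser.parse_finalize())

assert objects == [{"Name": "a"}, {"Name": "b"}]
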
# ipfshttpclient/filescanner.py
import abc
import collections.abc
import enum
import fnmatch
import os
import re
import sys

import typing as ty

from . import utils


if sys.version_info >= (3, 7):  #PY37+
	re_pattern_type = re.Pattern
	if ty.TYPE_CHECKING:
		re_pattern_t = re.Pattern[ty.AnyStr]
	else:
		re_pattern_t = re.Pattern
else:  #PY36-
	re_pattern_t = re_pattern_type = type(re.compile(""))


if hasattr(enum, "auto"):  #PY36+
	enum_auto = enum.auto
elif not ty.TYPE_CHECKING:  #PY35
	_counter = 0
	
	def enum_auto() -> int:
		global _counter
		_counter += 1
		return _counter


O_DIRECTORY = getattr(os, "O_DIRECTORY", 0)  # type: int

HAVE_FWALK       = hasattr(os, "fwalk")  # type: bool
HAVE_FWALK_BYTES = HAVE_FWALK and sys.version_info >= (3, 7)  # type: bool


class Matcher(ty.Generic[ty.AnyStr], metaclass=abc.ABCMeta):
	"""Represents a type that can match on file paths and decide whether they
	should be included in some file scanning/adding operation"""
	__slots__ = ("is_binary",)
	#is_binary: bool
	
	def __init__(self, is_binary: bool = False) -> None:
		self.is_binary = is_binary  # type: bool
	
	@abc.abstractmethod
	def should_descend(self, path: ty.AnyStr) -> bool:
		r"""Decides whether the file scanner should descend into the given
		directory path
		
		Arguments
		---------
		path
			A directory path upholding the same guarantees as those mentioned
			in :meth:`should_store`
		"""
	
	@abc.abstractmethod
	def should_report(self, path: ty.AnyStr, *, is_dir: bool) -> bool:
		r"""Decides whether the file scanner should store the given file or
		directory
		
		Note that in this case “file” may refer to anything that is not a
		directory and not just regular files. If the settings of the file
		scanner do not permit it to follow symbolic links this may even
		include symbolic links pointing at directories.
		
		Arguments
		---------
		path
			The file or directory path to check – the argument's type depends
			on the type of the path originally passed to the file scanner and
			may either be :type:`bytes` or :type:`str`, but will usually be
			:type:`str`
			
			The given path is guaranteed to have the following additional
			properties:
			
			* It will be properly normalized: There won't be any empty
			  (``…//…``), single-dot (``…/./…``) or (``…/../…``) directory
			  labels or leading or trailing slashes.
			* Its path separator will match the one found in
			  :var:`os.path.sep` – that is: It will be \ on Windows and
			  / everywhere else.
			* It will be relative to the file scanner's base directory.
		is_dir
			Whether the given path refers to a directory, see the above
			paragraph for what this means exactly
		"""


class MatchAll(ty.Generic[ty.AnyStr], Matcher[ty.AnyStr]):
	"""Matches everything – I want it all, I want it now…"""
	__slots__ = ()
	
	def should_descend(self, path: ty.AnyStr) -> utils.Literal_True:
		return True
	
	def should_report(self, path: ty.AnyStr, *, is_dir: bool) -> utils.Literal_True:
		return True


class MatchNone(ty.Generic[ty.AnyStr], Matcher[ty.AnyStr]):
	"""Matches nothing – the opposite of :class:`MatchAll`"""
	__slots__ = ()
	
	def should_descend(self, path: ty.AnyStr) -> utils.Literal_False:
		return False
	
	def should_report(self, path: ty.AnyStr, *, is_dir: bool) -> utils.Literal_False:
		return False


MATCH_ALL  = MatchAll()   # type: MatchAll[str]
MATCH_NONE = MatchNone()  # type: MatchNone[str]


class GlobMatcher(Matcher[ty.AnyStr], ty.Generic[ty.AnyStr]):
	"""Matches files and directories according to the shell glob conventions
	
	For details on the syntax see the Python :py:mod:`glob` module that this
	class emulates. If you are accustomed to globbing on real Unix shells,
	make sure to also carefully study its limitations, as these also apply
	here.
	
	Also note that this matcher always has recursion enabled and hence treats
	``**``-labels as special.
Additionally the *period_special* parameter is provided that may be used to disable the special handling of “dot-files” (files whose name starts with a leading period). One important thing to keep in mind that this is a *matcher* and works entirely I/O-less. As such, trying to include any files or directories *outside* of the matching domain will *not* work. For instance, a pattern like ``../a`` or ``b/../c`` would never match anything as a conforming file scanner would never pass in such a path, the same applies to any notion of absolute paths. This matcher will try its best to normalize or reject such cases, but if you're wondering why your pattern just won't match while pasting it into a real shell works this may be why. """ __slots__ = ("period_special", "_sep", "_pat", "_dir_only") #period_special: bool #_sep: ty.AnyStr #_pat: ty.List[ty.Optional[re_pattern_t[ty.AnyStr]]] #_dir_only: bool def __init__(self, pat: ty.AnyStr, *, period_special: bool = True): """ Arguments --------- pat The glob pattern to use for matching period_special Whether a leading period in file/directory names should be matchable by ``*``, ``?`` and ``[…]`` – traditionally they are not, but many modern shells allow one to disable this behaviour """ super().__init__(isinstance(pat, bytes)) self.period_special = period_special # type: bool self._sep = utils.maybe_fsencode(os.path.sep, pat) # type: ty.AnyStr dblstar = utils.maybe_fsencode("**", pat) # type: ty.AnyStr dot = utils.maybe_fsencode(".", pat) # type: ty.AnyStr pat_ndot = utils.maybe_fsencode(r"(?![.])", pat) # type: ty.AnyStr # Normalize path separator if os.path.altsep: pat = pat.replace(utils.maybe_fsencode(os.path.altsep, pat), self._sep) # Sanity checks for stuff that will definitely NOT EVER match # (there is another one in the loop below) assert not os.path.isabs(pat), "Absolute matching patterns will never match" # Note the extra final slash for its effect of only matching directories # # (TBH, I find it hard to see how that is useful, but everybody does it # and it keeps things consistent overall – something to only match files # would be nice however.) self._dir_only = pat.endswith(self._sep) # type: bool self._pat = [] # type: ty.List[ty.Optional[re_pattern_t[ty.AnyStr]]] for label in pat.split(self._sep): # Skip over useless path components if len(label) < 1 or label == dot: continue assert label != dot + dot, 'Matching patterns containing ".." will never match' if label == dblstar: self._pat.append(None) elif dblstar in label: raise NotImplementedError( "Using double-star (**) and other characters in the same glob " "path label ({0}) is not currently supported – please do file " "an issue if you need this!".format(os.fsdecode(label)) ) else: #re_expr: ty.AnyStr if not isinstance(label, bytes): re_expr = fnmatch.translate(label) else: re_expr = fnmatch.translate(label.decode("latin-1")).encode("latin-1") if period_special and not label.startswith(dot): re_expr = pat_ndot + re_expr self._pat.append(re.compile(re_expr)) def should_descend(self, path: ty.AnyStr) -> bool: for idx, label in enumerate(path.split(self._sep)): # Always descend into any directory below a recursive pattern as we # cannot predict what we will later do a tail match on pattern = self._pat[idx] if pattern is None: return True # Do not descend further if we reached the last label of the pattern # (unless the final pattern label is a recursive match, see above) # # This is independent of whether this *directory* will be included # or not. 
if idx == (len(self._pat) - 1): return False # Match the current pattern to decide whether to keep looking or not if not pattern.match(label): return False # The given path matched part of this pattern, so we should include this # directory to go further return True def should_report(self, path: ty.AnyStr, *, is_dir: bool) -> bool: # A final slash means “only match directories” if self._dir_only and not is_dir: return False labels = path.split(self._sep) # type: ty.List[ty.AnyStr] return self._match(labels, idx_pat=0, idx_path=0, is_dir=is_dir) def _match(self, labels: ty.List[ty.AnyStr], *, idx_pat: int, idx_path: int, is_dir: bool) -> bool: while idx_pat < len(self._pat): pattern = self._pat[idx_pat] if pattern is None: break # Match initial labels before recursion if idx_path >= len(labels): # Pattern refers to something below this path, store it only if it # is a directory return is_dir elif not pattern.match(labels[idx_path]): # Pattern did not match return False idx_pat += 1 idx_path += 1 dot = utils.maybe_fsencode(".", labels[0]) # type: ty.AnyStr # We reached the end of the matching labels or the start of recursion if idx_pat == len(self._pat): # End of matching labels – only include path if it was of the same # length or the previous pattern label was recursive if self._pat[idx_pat - 1] is None: return not self.period_special or not labels[idx_path].startswith(dot) else: return idx_path == len(labels) # Start of recursion – move to next label and recurse this method too # # If the path is then matched by our inferior self return success, # otherwise retry with the next path label until all labels have been # exhausted meaning that there was no match. idx_pat += 1 while idx_path < len(labels): if self._match(labels, idx_pat=idx_pat, idx_path=idx_path, is_dir=is_dir): return True # Do not add dot-files as part of recursive patterns by default if self.period_special and labels[idx_path].startswith(dot): break idx_path += 1 # Nothing matched return False class ReMatcher(Matcher[ty.AnyStr], ty.Generic[ty.AnyStr]): """Matches files and directories using a regular expression pattern See the description of :meth:`Matcher.should_store` for the specifics on how the matching path is formatted, but note that there is one important difference: In order to allow the regular expression to distinguish between files and directories, all directories (if *is_dir* is ``True``) contain a trailing forward or backward slash (depending on the platform). If you don't care about the this information you may want to add ``[\\/]?`` to end of the pattern. Unlike glob patterns, regular expressions cannot be reliably analyzed for which directories the file paths they may or may not match are in. Because of this, this matcher will cause the file scanner **to recurse into any directory it encounters** possibly causing an unnecessarily slow-down during scanning even if only very few files end up being selected. If this causes problems for you *use non-recursive glob patterns instead* or implement your own matcher with a proper :meth:`Matcher.should_descend` method. 
""" __slots__ = ("_pat",) #_pat: re_pattern_t[ty.AnyStr] def __init__(self, pat: ty.Union[ty.AnyStr, "re_pattern_t[ty.AnyStr]"]): self._pat = re.compile(pat) # type: re_pattern_t[ty.AnyStr] super().__init__(not (self._pat.flags & re.UNICODE)) def should_descend(self, path: ty.AnyStr) -> bool: return True def should_report(self, path: ty.AnyStr, *, is_dir: bool) -> bool: suffix = utils.maybe_fsencode(os.path.sep, path) if is_dir else type(path)() # type: ty.AnyStr return bool(self._pat.match(path + suffix)) class MetaMatcher(Matcher[ty.AnyStr], ty.Generic[ty.AnyStr]): """Match files and directories by delegating to other matchers""" __slots__ = ("_children",) #_children: ty.List[Matcher[ty.AnyStr]] def __init__(self, children: ty.List[Matcher[ty.AnyStr]]): assert len(children) > 0 super().__init__(children[0].is_binary) self._children = children # type: ty.List[Matcher[ty.AnyStr]] def should_descend(self, path: ty.AnyStr) -> bool: return any(m.should_descend(path) for m in self._children) def should_report(self, path: ty.AnyStr, *, is_dir: bool) -> bool: return any(m.should_report(path, is_dir=is_dir) for m in self._children) class NoRecusionAdapterMatcher(Matcher[ty.AnyStr], ty.Generic[ty.AnyStr]): """Matcher adapter that will prevent any recusion Takes a subordinate matcher, but tells the scanner to never descend into any child directory and to never store files from such a directory. This is an effective way to prevent any non-top-level files from being emitted by the scanner and hence provides ``recursive=False`` semantics. """ __slots__ = ("_child",) #_child: Matcher[ty.AnyStr] def __init__(self, child: Matcher[ty.AnyStr]): super().__init__(child.is_binary) self._child = child # type: Matcher[ty.AnyStr] def should_descend(self, path: ty.AnyStr) -> bool: return False def should_report(self, path: ty.AnyStr, *, is_dir: bool) -> bool: return utils.maybe_fsencode(os.path.sep, path) not in path \ and self._child.should_report(path, is_dir=is_dir) if ty.TYPE_CHECKING: _match_spec_t = ty.Union[ty.AnyStr, re_pattern_t[ty.AnyStr], Matcher[ty.AnyStr]] else: # Using `re_pattern_t` here like in the type checking case makes # sphinx_autodoc_typehints explode # noqa: E114 _match_spec_t = ty.Union[ty.AnyStr, re_pattern_t, Matcher[ty.AnyStr]] match_spec_t = ty.Union[ ty.Iterable[_match_spec_t[ty.AnyStr]], _match_spec_t[ty.AnyStr] ] @ty.overload def matcher_from_spec(spec: match_spec_t[ty.AnyStr], *, period_special: bool = ..., recursive: bool = ...) -> Matcher[ty.AnyStr]: ... @ty.overload # noqa: E302 def matcher_from_spec(spec: None, *, period_special: bool = ..., recursive: bool = ...) -> Matcher[str]: ... def matcher_from_spec(spec: match_spec_t[ty.AnyStr], *, # type: ignore[misc] # noqa: E302 period_special: bool = True, recursive: bool = True) -> Matcher[ty.AnyStr]: """Processes the given simplified matching spec, creating an equivalent :type:`Matcher` object""" if not recursive: return NoRecusionAdapterMatcher( matcher_from_spec(spec, recursive=True, period_special=period_special) ) if spec is None: return MATCH_ALL # mypy bug: This should cause a type error but does not? 
elif isinstance(spec, re_pattern_type): return ReMatcher(spec) elif isinstance(spec, (str, bytes)): return GlobMatcher(spec, period_special=period_special) elif isinstance(spec, collections.abc.Iterable) and not isinstance(spec, Matcher): spec = ty.cast(ty.Iterable[_match_spec_t[ty.AnyStr]], spec) # type: ignore[redundant-cast] matchers = [matcher_from_spec(s, # type: ignore[arg-type] # mypy bug recursive=recursive, period_special=period_special) for s in spec] if len(matchers) == 0: # Edge case: Empty list of matchers return MATCH_NONE # type: ignore[return-value] elif len(matchers) == 1: # Edge case: List of exactly one matcher return matchers[0] # type: ignore[return-value] # same mypy bug else: # Actual list of matchers (plural) return MetaMatcher(matchers) # type: ignore[arg-type] # same mypy bug else: return spec if ty.TYPE_CHECKING: from .filescanner_ty import FSNodeType, FSNodeEntry else: class FSNodeType(enum.Enum): FILE = enum_auto() DIRECTORY = enum_auto() FSNodeEntry = ty.NamedTuple("FSNodeEntry", [ ("type", FSNodeType), ("path", ty.AnyStr), ("relpath", ty.AnyStr), ("name", ty.AnyStr), ("parentfd", ty.Optional[int]) ]) class walk(ty.Generator[FSNodeEntry, ty.Any, None], ty.Generic[ty.AnyStr]): __slots__ = ("_generator", "_close_fd") #_generator: ty.Generator[FSNodeEntry, ty.Any, None] #_close_fd: ty.Optional[int] def __init__( self, directory: ty.Union[ty.AnyStr, utils.PathLike[ty.AnyStr], int], match_spec: ty.Optional[match_spec_t[ty.AnyStr]] = None, *, follow_symlinks: bool = False, intermediate_dirs: bool = True, period_special: bool = True, recursive: bool = True ) -> None: """ Arguments --------- directory Path to, or file descriptor of, directory to scan match_spec Matching rules for limiting the files and directories to include in the scan By default everything will be scanned and included. follow_symlinks Follow symbolic links while scanning? period_special The value to pass to the *period_special* argument of :class:`GlobMatcher` when constructing such an object from the given *match_spec* intermediate_dirs When reporting a file or directory first ensure that all containing directories were already reported Due to the way matching works, a matcher may only ask for its target files to be included but not care about the directories leading up to that file and this would cause the file to be reported without these intermediate directories to the caller as well. If you require these directories to be reported for consistency, this option will keep track of these intermediate paths and make it appear as if these had been included up-front. recursive Recurse into the given directory while scanning? If ``False`` this will wrap the given matcher inside :class:`NoRecusionAdapterMatcher` and hence prevent the scanner from doing any recursion. 
""" self._close_fd = None # type: ty.Optional[int] # Create matcher object matcher = matcher_from_spec( match_spec, recursive=recursive, period_special=period_special ) # type: Matcher[ty.AnyStr] # type: ignore[assignment] # Convert directory path to string … if isinstance(directory, int): if not HAVE_FWALK: raise NotImplementedError("Passing a file descriptor as directory is " "not supported on this platform") self._generator = self._walk( directory, None, matcher, follow_symlinks, intermediate_dirs ) # type: ty.Generator[FSNodeEntry, ty.Any, None] else: #directory_str: ty.AnyStr if hasattr(os, "fspath"): #PY36+ directory_str = os.fspath(directory) elif not ty.TYPE_CHECKING: #PY35 directory_str = utils.convert_path(directory) # Best-effort ensure that target directory exists if it is accessed by path os.stat(directory_str) # … and possibly open it as a FD if this is supported by the platform # # Note: `os.fwalk` support for binary paths was only added in 3.7+. directory_str_or_fd = directory_str # type: ty.Union[ty.AnyStr, int] if HAVE_FWALK and (not isinstance(directory_str, bytes) or HAVE_FWALK_BYTES): self._close_fd = directory_str_or_fd = os.open(directory_str, os.O_RDONLY | O_DIRECTORY) self._generator = self._walk( directory_str_or_fd, directory_str, matcher, follow_symlinks, intermediate_dirs ) def __iter__(self) -> 'walk[ty.AnyStr]': return self def __next__(self) -> FSNodeEntry: return next(self._generator) def __enter__(self) -> 'walk[ty.AnyStr]': return self def __exit__(self, *a: ty.Any) -> None: self.close() def send(self, value: ty.Any) -> FSNodeEntry: return self._generator.send(value) def throw(self, typ: ty.Type[BaseException], val: ty.Optional[BaseException] = None, tb: ty.Any = None) -> FSNodeEntry: try: return self._generator.throw(typ, val, tb) except: if self._close_fd is not None: os.close(self._close_fd) self._close_fd = None raise def close(self) -> None: try: self.throw(GeneratorExit) except GeneratorExit: pass @staticmethod def _join_dirs_and_files(dirnames: ty.List[ty.AnyStr], filenames: ty.List[ty.AnyStr]) \ -> ty.Iterator[ty.Tuple[ty.AnyStr, bool]]: for dirname in dirnames: yield (dirname, True) for filename in filenames: yield (filename, False) def _walk( self, directory: ty.Union[ty.AnyStr, int], directory_str: ty.Optional[ty.AnyStr], matcher: Matcher[ty.AnyStr], follow_symlinks: bool, intermediate_dirs: bool ) -> ty.Generator[FSNodeEntry, ty.Any, None]: if directory_str is not None: sep = utils.maybe_fsencode(os.path.sep, directory_str) elif matcher is not None and matcher.is_binary: # type: ignore[unreachable] sep = os.fsencode(os.path.sep) # type: ignore[assignment] else: sep = os.path.sep # type: ignore[assignment] dot = utils.maybe_fsencode(".", sep) # type: ty.AnyStr # Identify the leading portion of the `dirpath` returned by `os.walk` # that should be dropped if not isinstance(directory, int): while directory.endswith(sep): directory = directory[:-len(sep)] prefix = (directory if not isinstance(directory, int) else dot) + sep reported_directories = set() # type: ty.Set[ty.AnyStr] # Always report the top-level directory even if nothing therein is matched reported_directories.add(utils.maybe_fsencode("", sep)) yield FSNodeEntry( # type: ignore[misc] # mypy bug: gh/python/mypy#685 type = FSNodeType.DIRECTORY, path = prefix[:-len(sep)], # type: ignore[arg-type] relpath = dot, # type: ignore[arg-type] name = dot, # type: ignore[arg-type] parentfd = None ) if not isinstance(directory, int): walk_iter = os.walk(directory, followlinks=follow_symlinks ) # 
type: ty.Union[ty.Iterator[ty.Tuple[ty.AnyStr, ty.List[ty.AnyStr], ty.List[ty.AnyStr], int]], ty.Iterator[ty.Tuple[ty.AnyStr, ty.List[ty.AnyStr], ty.List[ty.AnyStr]]]] # noqa: E501 else: walk_iter = os.fwalk(dot, dir_fd=directory, follow_symlinks=follow_symlinks) try: for result in walk_iter: dirpath, dirnames, filenames = result[0:3] dirfd = result[3] if len(result) > 3 else None # type:ty.Optional[int] # type: ignore[misc] # Remove the directory prefix from the received path _, _, dirpath = dirpath.partition(prefix) # Keep track of reported intermediaries, so that we only check for # these at most once per directory base intermediates_reported = False # type: bool for filename, is_dir in self._join_dirs_and_files(list(dirnames), filenames): filepath = os.path.join(dirpath, filename) # Check if matcher thinks we should descend into this directory if is_dir and not matcher.should_descend(filepath): dirnames.remove(filename) # Check if matcher thinks we should report this node if not matcher.should_report(filepath, is_dir=is_dir): continue # Ensure that all containing directories are reported # before reporting this node if not intermediates_reported and intermediate_dirs: parts = dirpath.split(sep) for end_offset in range(len(parts)): parent_dirpath = sep.join(parts[0:(end_offset + 1)]) if parent_dirpath not in reported_directories: reported_directories.add(parent_dirpath) yield FSNodeEntry( # type: ignore[misc] # mypy bug: gh/python/mypy#685 type = FSNodeType.DIRECTORY, path = (prefix + parent_dirpath), # type: ignore[arg-type] relpath = parent_dirpath, # type: ignore[arg-type] name = parts[end_offset], # type: ignore[arg-type] parentfd = None ) intermediates_reported = True # Report the target file or directory if is_dir: reported_directories.add(filepath) yield FSNodeEntry( # type: ignore[misc] # mypy bug: gh/python/mypy#685 type = FSNodeType.DIRECTORY, path = (prefix + filepath), # type: ignore[arg-type] relpath = filepath, # type: ignore[arg-type] name = filename, # type: ignore[arg-type] parentfd = dirfd ) else: yield FSNodeEntry( # type: ignore[misc] # mypy bug: gh/python/mypy#685 type = FSNodeType.FILE, path = (prefix + filepath), # type: ignore[arg-type] relpath = filepath, # type: ignore[arg-type] name = filename, # type: ignore[arg-type] parentfd = dirfd ) finally: # Make sure the file descriptors bound by `os.fwalk` are freed on error walk_iter.close() # type: ignore[union-attr] # typeshed bug # Close root file descriptor of `os.fwalk` as well if self._close_fd is not None: os.close(self._close_fd) self._close_fd = None if HAVE_FWALK: # pragma: no cover supports_fd = frozenset({walk}) # type: ty.FrozenSet[ty.Callable[..., ty.Any]] else: # pragma: no cover supports_fd = frozenset() PKPP8F!ipfshttpclient/filescanner_ty.pyiimport enum import typing as ty class FSNodeType(enum.Enum): FILE = enum.auto() DIRECTORY = enum.auto() class FSNodeEntry(ty.Generic[ty.AnyStr], ty.NamedTuple): type: FSNodeType path: ty.AnyStr relpath: ty.AnyStr name: ty.AnyStr parentfd: ty.Optional[int]PKP>6ipfshttpclient/http.py"""Default HTTP client selection proxy""" import os from .http_common import ( StreamDecodeIteratorSync, addr_t, auth_t, cookies_t, headers_t, params_t, reqdata_sync_t, timeout_t, workarounds_t, ) __all__ = ( "addr_t", "auth_t", "cookies_t", "headers_t", "params_t", "reqdata_sync_t", "timeout_t", "workarounds_t", "ClientSync", "StreamDecodeIteratorSync", ) PREFER_HTTPX = (os.environ.get("PY_IPFS_HTTP_CLIENT_PREFER_HTTPX", "no").lower() not in ("0", "f", "false", "n", "no")) if 
PREFER_HTTPX: # pragma: http-backend=httpx try: #PY36+ from . import http_httpx as _backend except (ImportError, SyntaxError): #PY35 from . import http_requests as _backend # type: ignore[no-redef] else: # pragma: http-backend=requests try: from . import http_requests as _backend # type: ignore[no-redef] except ImportError: # pragma: no cover from . import http_httpx as _backend ClientSync = _backend.ClientSyncPKֱQY{SSipfshttpclient/http_common.pyimport abc import socket import sys import tarfile import typing as ty import urllib.parse import multiaddr # type: ignore[import] from multiaddr.protocols import (P_DNS, P_DNS4, P_DNS6, # type: ignore[import] P_HTTP, P_HTTPS, P_IP4, P_IP6, P_TCP, P_UNIX) from . import encoding from . import exceptions from . import utils AF_UNIX = getattr(socket, "AF_UNIX", NotImplemented) if ty.TYPE_CHECKING: import http.cookiejar # noqa: F401 from typing_extensions import Literal, Protocol # noqa: F401 else: Protocol = utils.Protocol if sys.version_info >= (3, 8): #PY38+ ty_Literal_json = ty.Literal["json"] ty_Literal_none = ty.Literal["none"] else: #PY37- ty_Literal_json = ty_Literal_none = str class Closable(Protocol): def close(self) -> None: ... T_co = ty.TypeVar("T_co", covariant=True) S = ty.TypeVar("S", bound=Closable) addr_t = ty.Union[multiaddr.Multiaddr, bytes, str] auth_t = ty.Optional[ty.Tuple[ty.Union[str, bytes], ty.Union[str, bytes]]] cookies_t = ty.Optional[ty.Union[ "http.cookiejar.CookieJar", ty.Dict[str, str] ]] headers_t = ty.Optional[ty.Union[ ty.Dict[str, str], ty.Dict[bytes, bytes], ty.Sequence[ty.Tuple[str, str]], ty.Sequence[ty.Tuple[bytes, bytes]], ]] params_t = ty.Optional[ty.Sequence[ty.Tuple[str, str]]] reqdata_sync_t = ty.Optional[ty.Iterator[bytes]] timeout_t = ty.Optional[ty.Union[ float, ty.Tuple[float, float], ]] workarounds_t = ty.Optional[ty.Set[str]] def _notify_stream_iter_closed() -> None: pass # Mocked by unit tests to check if the decode iterator is closed at proper times class StreamDecodeIteratorSync(ty.Generic[T_co]): """Wrapper around a bytes generator that decodes and yields data as it is received, automatically closing all attached resources when the input stream ceases Parameters ---------- closables List of objects to `.close()` once this iterator has been exhausted or is manually closed response Generator returning the bytes to decode and yield Will be closed in addition to all objects in *closables* when the time comes. parser Decoder (see :class:`~ipfshttpclient.encoding.Encoding`) that takes the bytes yielded by *response* and emits decoded Python objects. 
""" def __init__( self, closables: ty.List[Closable], response: ty.Generator[bytes, ty.Any, ty.Any], parser: encoding.Encoding[T_co] ): self._closables = closables # type: ty.List[Closable] self._parser = parser # type: ty.Optional[encoding.Encoding[T_co]] self._response_iter = response # type: ty.Optional[ty.Generator[bytes, ty.Any, ty.Any]] self._parser_iter = None # type: ty.Optional[ty.Generator[T_co, ty.Any, ty.Any]] def __iter__(self) -> "StreamDecodeIteratorSync[T_co]": return self def __next__(self) -> T_co: while True: # Try reading from current parser iterator parser_iter = self._parser_iter if parser_iter is not None: try: result = next(parser_iter) # type: T_co # Detect late error messages that occured after some data # has already been sent if isinstance(result, dict) and result.get("Type") == "error": msg = result["Message"] raise exceptions.PartialErrorResponse(msg) return result except StopIteration: self._parser_iter = parser_iter = None # Forward exception to caller if we do not expect any # further data if self._response_iter is None: self.close() raise # Iterator fuse to prevent crash after EOS/.close() if self._response_iter is None: self.close() raise StopIteration() assert self._parser is not None try: data = next(self._response_iter) # type: bytes # Create new parser iterator using the newly received data if len(data) > 0: self._parser_iter = self._parser.parse_partial(data) except StopIteration: # No more data to receive – destroy response iterator and # iterate over the final fragments returned by the parser self._response_iter = None self._parser_iter = self._parser.parse_finalize() def __enter__(self) -> "StreamDecodeIteratorSync[T_co]": return self def __exit__(self, *a: ty.Any) -> None: self.close() def close(self) -> None: # Clean up any open iterators first if self._response_iter is not None: self._response_iter.close() if self._parser_iter is not None: self._parser_iter.close() self._response_iter = None self._parser_iter = None # Clean up response object and parser for closable in self._closables: closable.close() self._closables.clear() self._parser = None _notify_stream_iter_closed() @ty.overload def stream_decode_full( response: Closable, response_iter: ty.Generator[bytes, ty.Any, ty.Any], parser: encoding.Dummy ) -> bytes: ... @ty.overload # noqa: E302 def stream_decode_full( closables: ty.List[Closable], response: ty.Generator[bytes, ty.Any, ty.Any], parser: encoding.Json ) -> ty.List[utils.json_dict_t]: ... 
def stream_decode_full( # type: ignore[misc] # noqa: E302 closables: ty.List[Closable], response: ty.Generator[bytes, ty.Any, ty.Any], parser: encoding.Encoding[T_co] ) -> ty.Union[ty.List[T_co], bytes]: with StreamDecodeIteratorSync(closables, response, parser) as response_iter: # Collect all responses result = list(response_iter) # type: ty.List[T_co] # Return byte streams concatenated into one message, instead of split # at arbitrary boundaries if parser.is_stream: return b"".join(result) # type: ignore[arg-type] return result class ReadableStreamWrapper: """Bytes iterator wrapper that exposes a fileobj compatible `.read(n=None)` and `.close()` interface""" def __init__(self, generator: ty.Generator[bytes, ty.Any, ty.Any]): self._buffer = bytearray() self._generator = generator def read(self, length: ty.Optional[int] = None) -> bytes: # Handle “take all” mode if length is None: buffer = self._buffer for chunk in self._generator: buffer.extend(chunk) try: return bytes(buffer) finally: buffer.clear() # Handle buffered mode if the current buffer is not empty # # This may return short reads, but we don't care as that is valid as long # as at least 1 byte is returned. if len(self._buffer) > 0: try: return bytes(self._buffer[0:length]) finally: del self._buffer[0:length] # Handle buffered mode if we need to request new data from the iterator try: chunk = b"" while len(chunk) < 1: chunk = next(self._generator) except StopIteration: return b"" else: try: return bytes(chunk[0:length]) finally: self._buffer.extend(chunk[length:]) def close(self) -> None: self._generator.close() self._buffer.clear() def multiaddr_to_url_data(addr: addr_t, base: str # type: ignore[no-any-unimported] ) -> ty.Tuple[str, ty.Optional[str], socket.AddressFamily, bool]: try: addr = multiaddr.Multiaddr(addr) except multiaddr.exceptions.ParseError as error: raise exceptions.AddressError(addr) from error addr_iter = iter(addr.items()) # Parse the `host`, `family`, `port` & `secure` values from the given # multiaddr, raising on unsupported `addr` values try: # Read host value proto, host = next(addr_iter) family = socket.AF_UNSPEC host_numeric = proto.code in (P_IP4, P_IP6) uds_path = None # type: ty.Optional[str] if proto.code in (P_IP4, P_DNS4): family = socket.AF_INET elif proto.code in (P_IP6, P_DNS6): family = socket.AF_INET6 elif proto.code == P_UNIX and AF_UNIX is not NotImplemented: family = AF_UNIX uds_path = host elif proto.code != P_DNS: raise exceptions.AddressError(addr) if family == AF_UNIX: assert uds_path is not None netloc = urllib.parse.quote(uds_path, safe="") else: # Read port value for IP-based transports proto, port = next(addr_iter) if proto.code != P_TCP: raise exceptions.AddressError(addr) # Pre-format network location URL part based on host+port if ":" in host and not host.startswith("["): netloc = "[{0}]:{1}".format(host, port) else: netloc = "{0}:{1}".format(host, port) # Read application-level protocol name secure = False try: proto, value = next(addr_iter) except StopIteration: pass else: if proto.code == P_HTTPS: secure = True elif proto.code != P_HTTP: raise exceptions.AddressError(addr) # No further values may follow; this also exhausts the iterator was_final = all(False for _ in addr_iter) if not was_final: raise exceptions.AddressError(addr) except StopIteration: raise exceptions.AddressError(addr) from None if not base.endswith("/"): base += "/" # Convert the parsed `addr` values to a URL base and parameters for the # HTTP library base_url = urllib.parse.SplitResult( scheme = "http" if not 
secure else "https", netloc = netloc, path = base, query = "", fragment = "" ).geturl() return base_url, uds_path, family, host_numeric def map_args_to_params( args: ty.Sequence[str], opts: ty.Mapping[str, str], *, offline: bool = False ) -> ty.List[ty.Tuple[str, str]]: params = [] # type: ty.List[ty.Tuple[str, str]] if offline: params.append(('offline', 'true')) params.extend(opts.items()) for arg in args: params.append(('arg', arg)) return params class ClientSyncBase(ty.Generic[S], metaclass=abc.ABCMeta): """An HTTP client for interacting with the IPFS daemon Parameters ---------- addr The address where the IPFS daemon may be reached base The path prefix for API calls offline Ask daemon to operate in “offline mode” – that is, it should not consult the network when unable to find resources locally, but fail instead workarounds List of daemon workarounds to apply auth HTTP basic authentication `(username, password)` tuple to send along with each request to the API daemon cookies HTTP cookies to send along with each request to the API daemon headers Custom HTTP headers to send along with each request to the API daemon timeout The default number of seconds to wait when establishing a connection to the daemon and waiting for returned data before throwing :exc:`~ipfshttpclient.exceptions.TimeoutError`; if the value is a tuple its contents will be interpreted as the values for the connection and receiving phases respectively, otherwise the value will apply to both phases; if the value is ``None`` then all timeouts will be disabled """ __slots__ = ("_session", "workarounds") #_session: ty.Optional[S] #workarounds: ty.Set[str] def __init__(self, addr: addr_t, base: str, *, # type: ignore[no-any-unimported] offline: bool = False, workarounds: workarounds_t = None, auth: auth_t = None, cookies: cookies_t = None, headers: headers_t = None, timeout: timeout_t = None) -> None: self._session = None # type: ty.Optional[S] self.workarounds = workarounds if workarounds else set() # type: ty.Set[str] #XXX: Figure out what stream-channels is and if we still need it params = map_args_to_params((), { "stream-channels": "true", }, offline=offline) # Initialize child self._init( addr, base, auth=auth, cookies=cookies, headers=headers, params=params, timeout=timeout, ) @abc.abstractmethod def _init(self, addr: addr_t, base: str, *, # type: ignore[no-any-unimported] auth: auth_t, cookies: cookies_t, headers: headers_t, params: params_t, timeout: timeout_t) -> None: ... @abc.abstractmethod def _make_session(self) -> S: ... def _access_session(self) -> ty.Tuple[ty.List[Closable], S]: if self._session is not None: return [], self._session else: session = self._make_session() return [session], session def open_session(self) -> None: """Open a persistent backend session that allows reusing HTTP connections between individual HTTP requests. It is an error to call this function if a session is already open.""" assert self._session is None self._session = self._make_session() def close_session(self) -> None: """Close a session opened by :meth:`~ipfshttpclient.http.HTTPClient.open_session`. 
If there is no session currently open (ie: it was already closed), then this method does nothing.""" if self._session is not None: self._session.close() self._session = None @abc.abstractmethod def _request( self, method: str, path: str, params: ty.Sequence[ty.Tuple[str, str]], *, auth: auth_t, data: reqdata_sync_t, headers: headers_t, timeout: timeout_t, chunk_size: ty.Optional[int] ) -> ty.Tuple[ty.List[Closable], ty.Generator[bytes, ty.Any, ty.Any]]: ... #XXX: There must be some way to make the following shorter… @ty.overload def request( self, path: str, args: ty.Sequence[str] = [], *, opts: ty.Mapping[str, str] = {}, decoder: str = "none", stream: bool = False, offline: bool = False, return_result: utils.Literal_False, auth: auth_t = None, cookies: cookies_t = None, data: reqdata_sync_t = None, headers: headers_t = None, timeout: timeout_t = None ) -> None: ... @ty.overload def request( self, path: str, args: ty.Sequence[str] = [], *, opts: ty.Mapping[str, str] = {}, decoder: ty_Literal_none = "none", stream: utils.Literal_False = False, offline: bool = False, return_result: utils.Literal_True = True, auth: auth_t = None, cookies: cookies_t = None, data: reqdata_sync_t = None, headers: headers_t = None, timeout: timeout_t = None ) -> bytes: ... @ty.overload def request( self, path: str, args: ty.Sequence[str] = [], *, opts: ty.Mapping[str, str] = {}, decoder: ty_Literal_none = "none", stream: utils.Literal_True, offline: bool = False, return_result: utils.Literal_True = True, auth: auth_t = None, cookies: cookies_t = None, data: reqdata_sync_t = None, headers: headers_t = None, timeout: timeout_t = None ) -> StreamDecodeIteratorSync[bytes]: ... @ty.overload def request( self, path: str, args: ty.Sequence[str] = [], *, opts: ty.Mapping[str, str] = {}, decoder: ty_Literal_json, stream: utils.Literal_False = False, offline: bool = False, return_result: utils.Literal_True = True, auth: auth_t = None, cookies: cookies_t = None, data: reqdata_sync_t = None, headers: headers_t = None, timeout: timeout_t = None ) -> ty.List[utils.json_dict_t]: ... @ty.overload def request( self, path: str, args: ty.Sequence[str] = [], *, opts: ty.Mapping[str, str] = {}, decoder: ty_Literal_json, stream: utils.Literal_True, offline: bool = False, return_result: utils.Literal_True = True, auth: auth_t = None, cookies: cookies_t = None, data: reqdata_sync_t = None, headers: headers_t = None, timeout: timeout_t = None ) -> StreamDecodeIteratorSync[utils.json_dict_t]: ... def request( # type: ignore[misc] self, path: str, args: ty.Sequence[str] = [], *, opts: ty.Mapping[str, str] = {}, decoder: ty.Union[ty_Literal_json, ty_Literal_none] = "none", stream: bool = False, offline: bool = False, return_result: bool = True, auth: auth_t = None, cookies: cookies_t = None, data: reqdata_sync_t = None, headers: headers_t = None, timeout: timeout_t = None ) -> ty.Optional[ty.Union[ # noqa: ET122 (checker bug) StreamDecodeIteratorSync[bytes], StreamDecodeIteratorSync[utils.json_dict_t], bytes, ty.List[utils.json_dict_t], ]]: """Sends an HTTP request to the IPFS daemon This function returns the contents of the HTTP response from the IPFS daemon. 
Raises ------ ~ipfshttpclient.exceptions.ErrorResponse ~ipfshttpclient.exceptions.ConnectionError ~ipfshttpclient.exceptions.ProtocolError ~ipfshttpclient.exceptions.StatusError ~ipfshttpclient.exceptions.TimeoutError Parameters ---------- path The command path relative to the given base decoder The name of the encoder to use to parse the HTTP response stream Whether to return an iterable yielding the received items incrementally instead of receiving and decoding all items up-front before returning them args Positional parameters to be sent along with the HTTP request opts Query string paramters to be sent along with the HTTP request offline Whether to request to daemon to handle this request in “offline-mode” return_result Whether to decode the values received from the daemon auth Authentication data to send along with this request as ``(username, password)`` tuple cookies HTTP cookies to send along with each request to the API daemon data Iterable yielding data to stream from the client to the daemon headers Custom HTTP headers to pass send along with the request timeout How many seconds to wait for the server to send data before giving up Set this to :py:`math.inf` to disable timeouts entirely. """ # Don't attempt to decode response or stream # (which would keep an iterator open that will then never be waited for) if not return_result: decoder = "none" # HTTP method must always be "POST" since go-IPFS 0.5 method = "POST" if "use_http_head_for_no_result" in self.workarounds and not return_result: # pragma: no cover method = "HEAD" parser = encoding.get_encoding(decoder) closables, res = self._request( method, path, map_args_to_params(args, opts, offline=offline), auth=auth, data=data, headers=headers, timeout=timeout, chunk_size=None, ) # type: ty.Tuple[ty.List[Closable], ty.Generator[bytes, ty.Any, ty.Any]] try: if not return_result: for closable in closables: closable.close() return None elif stream: # Decode each item as it is read return StreamDecodeIteratorSync(closables, res, parser) # type: ignore[misc] else: # Decode received item immediately return stream_decode_full(closables, res, parser) # type: ignore[arg-type] except: # Extra cleanup code for closables # # At the time of writing, there does not appear to be any way to # trigger this loop in practice, but we keep it for as extra level # of defence in case things slightly change later-on. for closable in closables: # pragma: no cover closable.close() raise def download( self, path: str, target: utils.path_t = ".", args: ty.Sequence[str] = [], *, opts: ty.Mapping[str, str] = {}, compress: bool = False, offline: bool = False, auth: auth_t = None, cookies: cookies_t = None, data: reqdata_sync_t = None, headers: headers_t = None, timeout: timeout_t = None ) -> None: """Downloads a directory from the IPFS daemon Downloads a file or files from IPFS into the current working directory, or the directory given by ``target``. Raises ------ ~ipfshttpclient.exceptions.ErrorResponse ~ipfshttpclient.exceptions.ConnectionError ~ipfshttpclient.exceptions.ProtocolError ~ipfshttpclient.exceptions.StatusError ~ipfshttpclient.exceptions.TimeoutError Parameters ---------- path The command path relative to the given base target The local path where downloaded files should be stored at Defaults to the current working directory. 
args Positional parameters to be sent along with the HTTP request opts Query string paramters to be sent along with the HTTP request compress Whether the downloaded file should be GZip compressed by the daemon before being sent to the client This may greatly speed up things if data is sent across slower networks like the internet but is a major bottleneck when communicating with the daemon on ``localhost``. offline Whether to request to daemon to handle this request in “offline-mode” return_result Whether to decode the values received from the daemon auth Authentication data to send along with this request as ``(username, password)`` tuple cookies HTTP cookies to send along with each request to the API daemon data Iterable yielding data to stream from the client to the daemon headers Custom HTTP headers to pass send along with the request timeout How many seconds to wait for the server to send data before giving up Set this to :py:`math.inf` to disable timeouts entirely. """ opts2 = dict(opts.items()) # type: ty.Dict[str, str] opts2["archive"] = "true" opts2["compress"] = "true" if compress else "false" closables, res = self._request( "POST", path, map_args_to_params(args, opts2, offline=offline), auth=auth, data=data, headers=headers, timeout=timeout, chunk_size=tarfile.RECORDSIZE, ) try: # try to stream download as a tar file stream mode = 'r|gz' if compress else 'r|' fileobj = ReadableStreamWrapper(res) # type: ty.IO[bytes] # type: ignore[assignment] with tarfile.open(fileobj=fileobj, mode=mode) as tf: tf.extractall(path=utils.convert_path(target)) finally: for closable in closables: closable.close()PKtQJQipfshttpclient/http_httpx.py"""HTTP client for API requests based on HTTPx This will be supplemented by an asynchronous version based on HTTPx's asynchronous API soon™. """ import math import socket import typing as ty import httpcore import httpx from . import encoding from . 
import exceptions from .http_common import ( ClientSyncBase, multiaddr_to_url_data, addr_t, auth_t, cookies_t, headers_t, params_t, reqdata_sync_t, timeout_t, Closable, ) if ty.TYPE_CHECKING: import httpx._types import typing_extensions # By using the precise types from HTTPx we'll also get type errors if our # types become somehow incompatible with the ones from that library RequestArgs = typing_extensions.TypedDict("RequestArgs", { "auth": "httpx._types.AuthTypes", "cookies": "httpx._types.CookieTypes", "headers": "httpx._types.HeaderTypes", "timeout": "httpx._types.TimeoutTypes", "params": "httpx._types.QueryParamTypes", }, total=False) else: RequestArgs = ty.Dict[str, ty.Any] def map_args_to_httpx( *, auth: auth_t = None, cookies: cookies_t = None, headers: headers_t = None, params: params_t = None, timeout: timeout_t = None, ) -> RequestArgs: kwargs: RequestArgs = {} if auth is not None: kwargs["auth"] = auth if cookies is not None: kwargs["cookies"] = cookies if headers is not None: kwargs["headers"] = headers if timeout is not None: if isinstance(timeout, tuple): kwargs["timeout"] = ( timeout[0] if timeout[0] < math.inf else None, timeout[1] if timeout[1] < math.inf else None, None, None, ) else: kwargs["timeout"] = timeout if timeout < math.inf else None if params is not None: kwargs["params"] = list(params) return kwargs class ClientSync(ClientSyncBase[httpx.Client]): __slots__ = ("_session_base", "_session_kwargs", "_session_laddr", "_session_uds_path") _session_base: "httpx._types.URLTypes" _session_kwargs: RequestArgs _session_laddr: ty.Optional[str] _session_uds_path: ty.Optional[str] def _init(self, addr: addr_t, base: str, *, # type: ignore[no-any-unimported] auth: auth_t, cookies: cookies_t, headers: headers_t, params: params_t, timeout: timeout_t) -> None: base_url: str uds_path: ty.Optional[str] family: socket.AddressFamily host_numeric: bool base_url, uds_path, family, host_numeric = multiaddr_to_url_data(addr, base) self._session_laddr = None self._session_uds_path = None if family != socket.AF_UNSPEC: if family == socket.AF_INET: self._session_laddr = "0.0.0.0" elif family == socket.AF_INET6: self._session_laddr = "::" elif family == socket.AF_UNIX: self._session_uds_path = uds_path else: assert False, ("multiaddr_to_url_data should only return a socket " "address family of AF_INET, AF_INET6 or AF_UNSPEC") self._session_base = base_url self._session_kwargs = map_args_to_httpx( auth=auth, cookies=cookies, headers=headers, params=params, timeout=timeout, ) def _make_session(self) -> httpx.Client: connection_pool = httpcore.SyncConnectionPool( local_address = self._session_laddr, uds = self._session_uds_path, #XXX: Argument values duplicated from httpx._client.Client._init_transport: keepalive_expiry = 5.0, #XXX: Value duplicated from httpx._client.KEEPALIVE_EXPIRY max_connections = 100, #XXX: Value duplicated from httpx._config.DEFAULT_LIMITS max_keepalive_connections = 20, #XXX: Value duplicated from httpx._config.DEFAULT_LIMITS ssl_context = httpx.create_ssl_context(trust_env=True), ) return httpx.Client(**self._session_kwargs, base_url = self._session_base, transport = connection_pool) def _do_raise_for_status(self, response: httpx.Response) -> None: try: response.raise_for_status() except httpx.HTTPError as error: content: ty.List[object] = [] try: decoder: encoding.Json = encoding.get_encoding("json") for chunk in response.iter_bytes(): content += list(decoder.parse_partial(chunk)) content += list(decoder.parse_finalize()) except exceptions.DecodingError: pass # If 
we have decoded an error response from the server, # use that as the exception message; otherwise, just pass # the exception on to the caller. if len(content) == 1 \ and isinstance(content[0], dict) \ and "Message" in content[0]: msg: str = content[0]["Message"] raise exceptions.ErrorResponse(msg, error) from error else: raise exceptions.StatusError(error) from error def _request( self, method: str, path: str, params: ty.Sequence[ty.Tuple[str, str]], *, auth: auth_t, data: reqdata_sync_t, headers: headers_t, timeout: timeout_t, chunk_size: ty.Optional[int], ) -> ty.Tuple[ty.List[Closable], ty.Generator[bytes, ty.Any, ty.Any]]: # Ensure path is relative so that it is resolved relative to the base while path.startswith("/"): path = path[1:] try: # Determine session object to use closables: ty.List[Closable] session: httpx.Client closables, session = self._access_session() # Do HTTP request (synchronously) and map exceptions try: res: httpx.Response = session.stream( method=method, url=path, **map_args_to_httpx( params=params, auth=auth, headers=headers, timeout=timeout, ), data=data, ).__enter__() closables.insert(0, res) except (httpx.ConnectTimeout, httpx.ReadTimeout, httpx.WriteTimeout) as error: raise exceptions.TimeoutError(error) from error except httpx.NetworkError as error: raise exceptions.ConnectionError(error) from error except httpx.ProtocolError as error: raise exceptions.ProtocolError(error) from error # Raise exception for response status # (optionally incorporating the response message, if available) self._do_raise_for_status(res) return closables, res.iter_bytes() # type: ignore[return-value] #FIXME: httpx except: for closable in closables: closable.close() raisePKDQ}17ipfshttpclient/http_requests.py"""HTTP client for API requests based on good old requests library This exists mainly for Python 3.5 compatibility. """ import math import http.client import os import typing as ty import urllib.parse import urllib3.exceptions # type: ignore[import] from . import encoding from . import exceptions from .http_common import ( ClientSyncBase, multiaddr_to_url_data, addr_t, auth_t, cookies_t, headers_t, params_t, reqdata_sync_t, timeout_t, Closable, ) PATCH_REQUESTS = (os.environ.get("PY_IPFS_HTTP_CLIENT_PATCH_REQUESTS", "yes").lower() not in ("false", "no")) if PATCH_REQUESTS: from . 
import requests_wrapper as requests elif not ty.TYPE_CHECKING: # pragma: no cover (always enabled in production) import requests def map_args_to_requests( *, auth: auth_t = None, cookies: cookies_t = None, headers: headers_t = None, params: params_t = None, timeout: timeout_t = None ) -> ty.Dict[str, ty.Any]: kwargs = {} # type: ty.Dict[str, ty.Any] if auth is not None: kwargs["auth"] = auth if cookies is not None: kwargs["cookies"] = cookies if headers is not None: kwargs["headers"] = headers if timeout is not None: if isinstance(timeout, tuple): timeout_ = ( timeout[0] if timeout[0] < math.inf else None, timeout[1] if timeout[1] < math.inf else None, ) # type: ty.Union[ty.Optional[float], ty.Tuple[ty.Optional[float], ty.Optional[float]]] else: timeout_ = timeout if timeout < math.inf else None kwargs["timeout"] = timeout_ if params is not None: kwargs["params"] = {} for name, value in params: if name not in kwargs["params"]: kwargs["params"][name] = value elif not isinstance(kwargs["params"][name], list): kwargs["params"][name] = [kwargs["params"][name], value] else: kwargs["params"][name].append(value) return kwargs class ClientSync(ClientSyncBase[requests.Session]): # type: ignore[name-defined] __slots__ = ("_base_url", "_default_timeout", "_request_proxies", "_session_props") #_base_url: str #_default_timeout: timeout_t #_request_proxies: ty.Optional[ty.Dict[str, str]] #_session_props: ty.Dict[str, ty.Any] def _init(self, addr: addr_t, base: str, *, # type: ignore[no-any-unimported] auth: auth_t, cookies: cookies_t, headers: headers_t, params: params_t, timeout: timeout_t) -> None: self._base_url, uds_path, family, host_numeric = multiaddr_to_url_data(addr, base) self._session_props = map_args_to_requests( auth=auth, cookies=cookies, headers=headers, params=params, ) self._default_timeout = timeout if PATCH_REQUESTS: # pragma: no branch (always enabled in production) self._session_props["family"] = family # Ensure that no proxy lookups are done for the UDS pseudo-hostname # # I'm well aware of the `.proxies` attribute of the session object: As it turns out, # setting *that* attribute will *not* bypass system proxy resolution – only the # per-request keyword-argument can do *that*…! self._request_proxies = None # type: ty.Optional[ty.Dict[str, str]] if uds_path: self._request_proxies = { "no_proxy": urllib.parse.quote(uds_path, safe=""), } def _make_session(self) -> requests.Session: # type: ignore[name-defined] session = requests.Session() # type: ignore[attr-defined] try: for name, value in self._session_props.items(): setattr(session, name, value) return session # It is very unlikely that this would ever error, but if it does try our # best to prevent a leak except: # pragma: no cover session.close() raise def _do_raise_for_status(self, response: requests.Request) -> None: # type: ignore[name-defined] try: response.raise_for_status() except requests.exceptions.HTTPError as error: # type: ignore[attr-defined] content = [] try: decoder = encoding.get_encoding("json") for chunk in response.iter_content(chunk_size=None): content += list(decoder.parse_partial(chunk)) content += list(decoder.parse_finalize()) except exceptions.DecodingError: pass # If we have decoded an error response from the server, # use that as the exception message; otherwise, just pass # the exception on to the caller. 
if len(content) == 1 \ and isinstance(content[0], dict) \ and "Message" in content[0]: msg = content[0]["Message"] raise exceptions.ErrorResponse(msg, error) from error else: raise exceptions.StatusError(error) from error def _request( self, method: str, path: str, params: ty.Sequence[ty.Tuple[str, str]], *, auth: auth_t, data: reqdata_sync_t, headers: headers_t, timeout: timeout_t, chunk_size: ty.Optional[int] ) -> ty.Tuple[ty.List[Closable], ty.Generator[bytes, ty.Any, ty.Any]]: # Ensure path is relative so that it is resolved relative to the base while path.startswith("/"): path = path[1:] url = urllib.parse.urljoin(self._base_url, path) try: # Determine session object to use closables, session = self._access_session() # Do HTTP request (synchronously) and map exceptions try: res = session.request( method=method, url=url, **map_args_to_requests( params=params, auth=auth, headers=headers, timeout=(timeout if timeout is not None else self._default_timeout), ), proxies=self._request_proxies, data=data, stream=True, ) closables.insert(0, res) except (requests.ConnectTimeout, requests.Timeout) as error: # type: ignore[attr-defined] raise exceptions.TimeoutError(error) from error except requests.ConnectionError as error: # type: ignore[attr-defined] # Report protocol violations separately # # This used to happen because requests wouldn't catch # `http.client.HTTPException` at all, now we recreate # this behaviour manually if we detect it. if isinstance(error.args[0], urllib3.exceptions.ProtocolError): raise exceptions.ProtocolError(error.args[0]) from error.args[0] raise exceptions.ConnectionError(error) from error # Looks like the following error doesn't happen anymore with modern requests? except http.client.HTTPException as error: # pragma: no cover raise exceptions.ProtocolError(error) from error # Raise exception for response status # (optionally incorporating the response message, if available) self._do_raise_for_status(res) return closables, res.iter_content(chunk_size=chunk_size) except: for closable in closables: closable.close() raisePKPsHNHNipfshttpclient/multipart.py"""HTTP :mimetype:`multipart/*`-encoded file streaming. """ import abc import collections.abc import io import os import stat import typing as ty import urllib.parse import uuid from . import filescanner from . import utils if ty.TYPE_CHECKING: #COMPAT: Py3.7- import typing_extensions as ty_ext else: from . import utils as ty_ext gen_bytes_t = ty.Generator[bytes, ty.Any, ty.Any] match_spec_t = ty.Optional[filescanner.match_spec_t[ty.AnyStr]] default_chunk_size = io.DEFAULT_BUFFER_SIZE def content_disposition_headers(filename: str, disptype: str = "form-data; name=\"file\"") \ -> ty.Dict[str, str]: """Returns a dict containing the MIME content-disposition header for a file. .. code-block:: python >>> content_disposition_headers('example.txt') {'Content-Disposition': 'form-data; filename="example.txt"'} >>> content_disposition_headers('example.txt', 'attachment') {'Content-Disposition': 'attachment; filename="example.txt"'} Parameters ---------- filename : str Filename to retrieve the MIME content-disposition for disptype : str Rhe disposition type to use for the file """ disp = '{0}; filename="{1}"'.format( disptype, urllib.parse.quote(filename, safe='') ) return {'Content-Disposition': disp} def content_type_headers(filename: str, content_type: ty.Optional[str] = None) \ -> ty.Dict[str, str]: """Returns a dict with the content-type header for a file. 
Guesses the mimetype for a filename and returns a dict containing the content-type header. .. code-block:: python >>> content_type_headers('example.txt') {'Content-Type': 'text/plain'} >>> content_type_headers('example.jpeg') {'Content-Type': 'image/jpeg'} >>> content_type_headers('example') {'Content-Type': 'application/octet-stream'} Parameters ---------- filename Filename to guess the content-type for content_type The Content-Type to use; if not set a content type will be guessed """ return {'Content-Type': content_type if content_type else utils.guess_mimetype(filename)} def multipart_content_type_headers(boundary: str, subtype: str = 'mixed') -> ty.Dict[str, str]: """Creates a MIME multipart header with the given configuration. Returns a dict containing a MIME multipart header with the given boundary. .. code-block:: python >>> multipart_content_type_headers('8K5rNKlLQVyreRNncxOTeg') {'Content-Type': 'multipart/mixed; boundary="8K5rNKlLQVyreRNncxOTeg"'} >>> multipart_content_type_headers('8K5rNKlLQVyreRNncxOTeg', 'alt') {'Content-Type': 'multipart/alt; boundary="8K5rNKlLQVyreRNncxOTeg"'} Parameters ---------- boundary The content delimiter to put into the header subtype The subtype in :mimetype:`multipart/*`-domain to put into the header """ ctype = 'multipart/{}; boundary="{}"'.format( subtype, boundary ) return {'Content-Type': ctype} class StreamBase(metaclass=abc.ABCMeta): """Generator that encodes multipart/form-data. An abstract buffered generator class which encodes :mimetype:`multipart/form-data`. Parameters ---------- name The name of the file to encode chunk_size The maximum size that any single file chunk may have in bytes """ __slots__ = ("chunk_size", "name", "_boundary", "_headers") #chunk_size: int #name: str #_boundry: str #_headers: ty.Dict[str, str] def __init__(self, name: str, chunk_size: int = default_chunk_size) -> None: self.chunk_size = chunk_size self.name = name self._boundary = uuid.uuid4().hex self._headers = content_disposition_headers(name) self._headers.update(multipart_content_type_headers(self._boundary, subtype='form-data')) super().__init__() def headers(self) -> ty.Dict[str, str]: return self._headers.copy() @abc.abstractmethod def _body(self) -> gen_bytes_t: """Yields the body of this stream with chunks of undefined size""" def body(self) -> gen_bytes_t: """Yields the body of this stream. """ # Cap all returned body chunks to the given chunk size yield from self._gen_chunks(self._body()) def _gen_headers(self, headers: ty.Dict[str, str]) -> gen_bytes_t: """Yields the HTTP header text for some content Parameters ---------- headers The headers to yield """ for name, value in sorted(headers.items(), key=lambda i: i[0]): yield b"%s: %s\r\n" % (name.encode("ascii"), value.encode("utf-8")) yield b"\r\n" def _gen_chunks(self, gen: ty.Iterable[bytes]) -> gen_bytes_t: """Generates byte chunks of a given size. Takes a bytes generator and yields chunks of a maximum of ``chunk_size`` bytes. Parameters ---------- gen The bytes generator that produces the bytes """ for data in gen: #PERF: This is zero-copy if `len(data) <= self.chunk_size` for offset in range(0, len(data), self.chunk_size): yield data[offset:(self.chunk_size + offset)] def _gen_item_start(self) -> gen_bytes_t: """Yields the body section for the content. """ yield b"--%s\r\n" % (self._boundary.encode("ascii")) def _gen_item_end(self) -> gen_bytes_t: """Yields the body section for the content. 
""" yield b"\r\n" def _gen_end(self) -> gen_bytes_t: """Yields the closing text of a multipart envelope.""" yield b'--%s--\r\n' % (self._boundary.encode("ascii")) # mypy sucks… :-(gh/python/mypy#8705) class _StreamFileMixinProto(ty_ext.Protocol): @property def chunk_size(self) -> int: ... def _gen_headers(self, headers: ty.Dict[str, str]) -> gen_bytes_t: ... def _gen_item_start(self) -> gen_bytes_t: ... def _gen_item_end(self) -> gen_bytes_t: ... def _gen_file_start(self: "_StreamFileMixinProto", filename: str, file_location: ty.Optional[str] = None, content_type: ty.Optional[str] = None) -> gen_bytes_t: ... def _gen_file_chunks(self: "_StreamFileMixinProto", file: ty.IO[bytes]) -> gen_bytes_t: ... def _gen_file_end(self: "_StreamFileMixinProto") -> gen_bytes_t: ... class StreamFileMixin: __slots__ = () def _gen_file(self: _StreamFileMixinProto, filename: str, file_location: ty.Optional[str] = None, file: ty.Optional[ty.IO[bytes]] = None, content_type: ty.Optional[str] = None) -> gen_bytes_t: """Yields the entire contents of a file. Parameters ---------- filename Filename of the file being opened and added to the HTTP body file_location Full path to the file being added, including the filename file The binary file-like object whose contents should be streamed No contents will be streamed if this is ``None``. content_type The Content-Type of the file; if not set a value will be guessed """ yield from self._gen_file_start(filename, file_location, content_type) if file: yield from self._gen_file_chunks(file) yield from self._gen_file_end() def _gen_file_start(self: _StreamFileMixinProto, filename: str, file_location: ty.Optional[str] = None, content_type: ty.Optional[str] = None) -> gen_bytes_t: """Yields the opening text of a file section in multipart HTTP. Parameters ---------- filename Filename of the file being opened and added to the HTTP body file_location Full path to the file being added, including the filename content_type The Content-Type of the file; if not set a value will be guessed """ yield from self._gen_item_start() headers = content_disposition_headers(filename.replace(os.sep, "/")) headers.update(content_type_headers(filename, content_type)) if file_location and os.path.isabs(file_location): headers.update({"Abspath": file_location}) yield from self._gen_headers(headers) def _gen_file_chunks(self: _StreamFileMixinProto, file: ty.IO[bytes]) -> gen_bytes_t: """Yields chunks of a file. Parameters ---------- fp The file to break into chunks (must be an open file or have the ``readinto`` method) """ while True: buf = file.read(self.chunk_size) if len(buf) < 1: break yield buf def _gen_file_end(self: _StreamFileMixinProto) -> gen_bytes_t: """Yields the end text of a file section in HTTP multipart encoding.""" return self._gen_item_end() class FilesStream(StreamBase, StreamFileMixin): """Generator that encodes multiples files into HTTP multipart. A buffered generator that encodes an array of files as :mimetype:`multipart/form-data`. This is a concrete implementation of :class:`~ipfsapi.multipart.StreamBase`. 
Parameters ---------- files The name, file object or file descriptor of the file to encode; may also be a list of several items to allow for more efficient batch processing chunk_size The maximum size that any single file chunk may have in bytes """ __slots__ = ("files",) #files: ty.Union[utils.clean_file_t, ty.Iterable[utils.clean_file_t]] def __init__(self, files: ty.Union[utils.clean_file_t, ty.Iterable[utils.clean_file_t]], name: str = "files", chunk_size: int = default_chunk_size) -> None: self.files = utils.clean_files(files) super().__init__(name, chunk_size=chunk_size) def _body(self) -> gen_bytes_t: """Yields the body of the buffered file.""" for file, need_close in self.files: try: try: file_location = file.name # type: ty.Optional[str] filename = os.path.basename(file.name) # type: str except AttributeError: file_location = None filename = '' yield from self._gen_file(filename, file_location, file) finally: if need_close: file.close() yield from self._gen_end() class DirectoryStream(StreamBase, StreamFileMixin, ty.Generic[ty.AnyStr]): """Generator that encodes a directory into HTTP multipart. A buffered generator that encodes an array of files as :mimetype:`multipart/form-data`. This is a concrete implementation of :class:`~ipfshttpclient.multipart.StreamBase`. Parameters ---------- directory The filepath or file descriptor of the directory to encode File descriptors are only supported on Unix. dirpath The path to the directory being uploaded, if this is absolute it will be included in a header for each emitted file and enables use of the no-copy filestore facilities If the *wrap_with_directory* attribute is ``True`` during upload the string ``dirpath.name if dirpath else '_'`` will be visible as the name of the uploaded directory within its wrapper. chunk_size The maximum size that any single file chunk may have in bytes patterns One or several glob patterns or compiled regular expression objects used to determine which files to upload Only files or directories matched by any of these patterns will be uploaded. If a directory is not matched directly but contains at least one file or directory below it that is, it will be included in the upload as well but will not include other items. If a directory matches any of the given patterns and *recursive* is then it, as well as all other files and directories below it, will be included as well. 
period_special Whether a leading period in file/directory names should be matchable by ``*``, ``?`` and ``[…]`` – traditionally they are not, but many modern shells allow one to disable this behaviour """ __slots__ = ("abspath", "follow_symlinks", "scanner") #abspath: ty.Optional[ty.AnyStr] #follow_symlinks: bool #scanner: filescanner.walk[ty.AnyStr] def __init__(self, directory: ty.Union[ty.AnyStr, utils.PathLike[ty.AnyStr], int], *, chunk_size: int = default_chunk_size, follow_symlinks: bool = False, patterns: match_spec_t[ty.AnyStr] = None, period_special: bool = True, recursive: bool = False) -> None: self.follow_symlinks = follow_symlinks if not isinstance(directory, int): directory = utils.convert_path(directory) # Create file scanner from parameters self.scanner = filescanner.walk( directory, patterns, follow_symlinks=follow_symlinks, period_special=period_special, recursive=recursive ) # type: filescanner.walk[ty.AnyStr] # Figure out the absolute path of the directory added self.abspath = None # type: ty.Optional[ty.AnyStr] if not isinstance(directory, int): self.abspath = os.path.abspath(utils.convert_path(directory)) # Figure out basename of the containing directory # (normpath is an acceptable approximation here) basename = "_" # type: ty.Union[str, bytes] if not isinstance(directory, int): basename = os.fsdecode(os.path.basename(os.path.normpath(directory))) super().__init__(os.fsdecode(basename), chunk_size=chunk_size) def _body(self) -> gen_bytes_t: """Streams the contents of the selected directory as binary chunks.""" try: for type, path, relpath, name, parentfd in self.scanner: relpath_unicode = os.fsdecode(relpath).replace(os.path.sep, "/") short_path = self.name + (("/" + relpath_unicode) if relpath_unicode != "." else "") if type is filescanner.FSNodeType.FILE: try: # Only regular files and directories can be uploaded if parentfd is not None: stat_data = os.stat(name, dir_fd=parentfd, follow_symlinks=self.follow_symlinks) else: stat_data = os.stat(path, follow_symlinks=self.follow_symlinks) if not stat.S_ISREG(stat_data.st_mode): continue absolute_path = None # type: ty.Optional[str] if self.abspath is not None: absolute_path = os.fsdecode(os.path.join(self.abspath, relpath)) if parentfd is None: f_path_or_desc = path # type: ty.Union[ty.AnyStr, int] else: f_path_or_desc = os.open(name, os.O_RDONLY | os.O_CLOEXEC, dir_fd=parentfd) # Stream file to client with open(f_path_or_desc, "rb") as file: yield from self._gen_file(short_path, absolute_path, file) except OSError as e: print(e) # File might have disappeared between `os.walk()` and `open()` pass elif type is filescanner.FSNodeType.DIRECTORY: # Generate directory as special empty file yield from self._gen_file(short_path, content_type="application/x-directory") yield from self._gen_end() finally: self.scanner.close() class BytesFileStream(FilesStream): """A buffered generator that encodes bytes as file in :mimetype:`multipart/form-data`. 
Parameters ---------- data The binary data to stream to the daemon name The filename to report to the daemon for this upload chunk_size The maximum size of a single data chunk """ __slots__ = ("data",) #data: ty.Iterable[bytes] def __init__(self, data: ty.Union[bytes, gen_bytes_t], name: str = "bytes", *, chunk_size: int = default_chunk_size) -> None: super().__init__([], name=name, chunk_size=chunk_size) if not isinstance(data, bytes): self.data = data # type: ty.Iterable[bytes] else: self.data = (data,) def body(self) -> gen_bytes_t: """Yields the encoded body.""" yield from self._gen_file_start(self.name) yield from self._gen_chunks(self.data) yield from self._gen_file_end() yield from self._gen_end() def stream_files(files: ty.Union[utils.clean_file_t, ty.Iterable[utils.clean_file_t]], *, chunk_size: int = default_chunk_size) -> ty.Tuple[gen_bytes_t, ty.Dict[str, str]]: """Gets a buffered generator for streaming files. Returns a buffered generator which encodes a file or list of files as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- files The file(s) to stream chunk_size Maximum size of each stream chunk """ stream = FilesStream(files, chunk_size=chunk_size) return stream.body(), stream.headers() def stream_directory(directory: ty.Union[ty.AnyStr, utils.PathLike[ty.AnyStr], int], *, chunk_size: int = default_chunk_size, follow_symlinks: bool = False, patterns: match_spec_t[ty.AnyStr] = None, period_special: bool = True, recursive: bool = False ) -> ty.Tuple[gen_bytes_t, ty.Dict[str, str]]: """Returns buffered generator yielding the contents of a directory Returns a buffered generator which encodes a directory as :mimetype:`multipart/form-data` with the corresponding headers. For the meaning of these parameters see the description of :class:`DirectoryStream`. """ stream = DirectoryStream(directory, chunk_size=chunk_size, follow_symlinks=follow_symlinks, period_special=period_special, patterns=patterns, recursive=recursive) return stream.body(), stream.headers() _filepaths_t = ty.Union[utils.path_t, int, io.IOBase] filepaths_t = ty.Union[_filepaths_t, ty.Iterable[_filepaths_t]] def stream_filesystem_node( filepaths: ty.Union[ ty.AnyStr, utils.PathLike[ty.AnyStr], int, ty.Iterable[ty.Union[ty.AnyStr, utils.PathLike[ty.AnyStr]]] ], *, chunk_size: int = default_chunk_size, follow_symlinks: bool = False, patterns: match_spec_t[ty.AnyStr] = None, period_special: bool = True, recursive: bool = False ) -> ty.Tuple[gen_bytes_t, ty.Dict[str, str], bool]: """Gets a buffered generator for streaming either files or directories. Returns a buffered generator which encodes the file or directory at the given path as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- filepaths The filepath of a single directory or one or more files to stream chunk_size Maximum size of each stream chunk follow_symlinks Follow symbolic links when recursively scanning directories? (directories only) period_special Treat files and directories with a leading period character (“dot-files”) specially in glob patterns? (directories only) If this is set these files will only be matched by path labels whose initial character is a period as well. patterns Single *glob* pattern or list of *glob* patterns and compiled regular expressions to match the paths of files and directories to be added to IPFS (directories only) recursive Scan directories recursively for additional files? 
(directories only) """ is_dir = False if isinstance(filepaths, utils.path_types): is_dir = os.path.isdir(utils.convert_path(filepaths)) elif isinstance(filepaths, int): import stat is_dir = stat.S_ISDIR(os.fstat(filepaths).st_mode) if is_dir: assert not isinstance(filepaths, collections.abc.Iterable) \ or isinstance(filepaths, (str, bytes)) return stream_directory( filepaths, chunk_size=chunk_size, period_special=period_special, patterns=patterns, recursive=recursive, ) + (True,) else: return stream_files(filepaths, chunk_size=chunk_size) + (False,) def stream_bytes( data: ty.Union[bytes, gen_bytes_t], *, chunk_size: int = default_chunk_size ) -> ty.Tuple[gen_bytes_t, ty.Dict[str, str]]: """Gets a buffered generator for streaming binary data. Returns a buffered generator which encodes binary data as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- data The data bytes to stream chunk_size The maximum size of each stream chunk """ stream = BytesFileStream(data, chunk_size=chunk_size) return stream.body(), stream.headers() def stream_text( text: ty.Union[str, ty.Iterable[str]], *, chunk_size: int = default_chunk_size ) -> ty.Tuple[gen_bytes_t, ty.Dict[str, str]]: """Gets a buffered generator for streaming text. Returns a buffered generator which encodes a string as :mimetype:`multipart/form-data` with the corresponding headers. Parameters ---------- text The data bytes to stream chunk_size The maximum size of each stream chunk """ if not isinstance(text, str): def binary_stream() -> gen_bytes_t: for item in text: yield item.encode("utf-8") data = binary_stream() # type: ty.Union[gen_bytes_t, bytes] else: data = text.encode("utf-8") return stream_bytes(data, chunk_size=chunk_size) PKQ lW % %"ipfshttpclient/requests_wrapper.py# type: ignore """Exposes the full ``requests`` HTTP library API, while adding an extra ``family`` parameter to all HTTP request operations that may be used to restrict the address family used when resolving a domain-name to an IP address. """ import socket import urllib.parse import requests import requests.adapters import urllib3 import urllib3.connection import urllib3.exceptions import urllib3.poolmanager import urllib3.util.connection AF2NAME = { int(socket.AF_INET): "ip4", int(socket.AF_INET6): "ip6", } if hasattr(socket, "AF_UNIX"): AF2NAME[int(socket.AF_UNIX)] = "unix" NAME2AF = {name: af for af, name in AF2NAME.items()} # This function is copied from urllib3/util/connection.py (that in turn copied # it from socket.py in the Python 2.7 standard library test suite) and accepts # an extra `family` parameter that specifies the allowed address families for # name resolution. # # The entire remainder of this file after this only exists to ensure that this # `family` parameter is exposed all the way up to request's `Session` interface, # storing it as part of the URL scheme while traversing most of the layers. 
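# Illustrative sketch (not part of the original file): how the `family`
# parameter is encoded into and recovered from the URL scheme using the
# AF2NAME/NAME2AF tables defined above. The helper name below is hypothetical
# and only demonstrates the convention used by the remainder of this module.
def _example_family_scheme_roundtrip():
	family = socket.AF_INET
	scheme = "http+{0}".format(AF2NAME[int(family)])  # -> "http+ip4"
	name = scheme.rsplit("+", 1)[1]
	assert NAME2AF[name] == int(family)
	return scheme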
def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None, socket_options=None, family=socket.AF_UNSPEC): host, port = address if host.startswith('['): host = host.strip('[]') err = None if not family or family == socket.AF_UNSPEC: family = urllib3.util.connection.allowed_gai_family() # Extension for Unix domain sockets if hasattr(socket, "AF_UNIX") and family == socket.AF_UNIX: gai_result = [(socket.AF_UNIX, socket.SOCK_STREAM, 0, "", host)] else: gai_result = socket.getaddrinfo(host, port, family, socket.SOCK_STREAM) for res in gai_result: af, socktype, proto, canonname, sa = res sock = None try: sock = socket.socket(af, socktype, proto) # If provided, set socket level options before connecting. if socket_options is not None and family != getattr(socket, "AF_UNIX", NotImplemented): for opt in socket_options: sock.setsockopt(*opt) if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT: sock.settimeout(timeout) if source_address: sock.bind(source_address) sock.connect(sa) return sock except OSError as e: err = e if sock is not None: sock.close() sock = None if err is not None: raise err raise OSError("getaddrinfo returns an empty list") # Override the `urllib3` low-level Connection objects that do the actual work # of speaking HTTP def _kw_scheme_to_family(kw, base_scheme): family = socket.AF_UNSPEC scheme = kw.pop("scheme", None) if isinstance(scheme, str): parts = scheme.rsplit("+", 1) if len(parts) == 2 and parts[0] == base_scheme: family = NAME2AF.get(parts[1], family) return family class ConnectionOverrideMixin: def _new_conn(self): extra_kw = { "family": self.family } if self.source_address: extra_kw['source_address'] = self.source_address if self.socket_options: extra_kw['socket_options'] = self.socket_options try: dns_host = getattr(self, "_dns_host", self.host) if hasattr(socket, "AF_UNIX") and extra_kw["family"] == socket.AF_UNIX: dns_host = urllib.parse.unquote(dns_host) conn = create_connection( (dns_host, self.port), self.timeout, **extra_kw) except socket.timeout: raise urllib3.exceptions.ConnectTimeoutError( self, "Connection to %s timed out. 
(connect timeout=%s)" % (self.host, self.timeout)) except OSError as e: raise urllib3.exceptions.NewConnectionError( self, "Failed to establish a new connection: %s" % e) return conn class HTTPConnection(ConnectionOverrideMixin, urllib3.connection.HTTPConnection): def __init__(self, *args, **kw): self.family = _kw_scheme_to_family(kw, "http") super().__init__(*args, **kw) class HTTPSConnection(ConnectionOverrideMixin, urllib3.connection.HTTPSConnection): def __init__(self, *args, **kw): self.family = _kw_scheme_to_family(kw, "https") super().__init__(*args, **kw) # Override the higher-level `urllib3` ConnectionPool objects that instantiate # one or more Connection objects and dispatch work between them class HTTPConnectionPool(urllib3.HTTPConnectionPool): ConnectionCls = HTTPConnection class HTTPSConnectionPool(urllib3.HTTPSConnectionPool): ConnectionCls = HTTPSConnection # Override the highest-level `urllib3` PoolManager to also properly support the # address family extended scheme values in URLs and pass these scheme values on # to the individual ConnectionPool objects class PoolManager(urllib3.PoolManager): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # Additionally to adding our variant of the usual HTTP and HTTPS # pool classes, also add these for some variants of the default schemes # that are limited to some specific address family only self.pool_classes_by_scheme = {} for scheme, ConnectionPool in (("http", HTTPConnectionPool), ("https", HTTPSConnectionPool)): self.pool_classes_by_scheme[scheme] = ConnectionPool for name in AF2NAME.values(): self.pool_classes_by_scheme["{0}+{1}".format(scheme, name)] = ConnectionPool self.key_fn_by_scheme["{0}+{1}".format(scheme, name)] = self.key_fn_by_scheme[scheme] # These next two are only required to ensure that our custom `scheme` values # will be passed down to the `*ConnectionPool`s and finally to the actual # `*Connection`s as parameter def _new_pool(self, scheme, host, port, request_context=None): # Copied from `urllib3` to *not* surpress the `scheme` parameter pool_cls = self.pool_classes_by_scheme[scheme] if request_context is None: request_context = self.connection_pool_kw.copy() for key in ('host', 'port'): request_context.pop(key, None) if scheme == "http" or scheme.startswith("http+"): for kw in urllib3.poolmanager.SSL_KEYWORDS: request_context.pop(kw, None) return pool_cls(host, port, **request_context) def connection_from_pool_key(self, pool_key, request_context=None): # Copied from `urllib3` so that we continue to ensure that this will # call `_new_pool` with self.pools.lock: pool = self.pools.get(pool_key) if pool: return pool scheme = request_context['scheme'] host = request_context['host'] port = request_context['port'] pool = self._new_pool(scheme, host, port, request_context=request_context) self.pools[pool_key] = pool return pool # Override the lower-level `requests` adapter that invokes the `urllib3` # PoolManager objects class HTTPAdapter(requests.adapters.HTTPAdapter): def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): # save these values for pickling (copied from `requests`) self._pool_connections = connections self._pool_maxsize = maxsize self._pool_block = block self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, block=block, strict=True, **pool_kwargs) # Override the highest-level `requests` Session object to accept the `family` # parameter for any request and encode its value as part of the URL scheme # when passing it down to the adapter class 
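# Hypothetical usage sketch (not part of the original file) for the Session
# subclass defined just below: passing `family` to `request()` restricts name
# resolution to that address family by rewriting the URL scheme (for example
# to "http+ip4"). The URL and daemon endpoint are illustrative assumptions only.
def _example_ipv4_only_version_call():
	with Session() as session:
		return session.request(
			"GET", "http://localhost:5001/api/v0/version",
			family=socket.AF_INET,
		)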
Session(requests.Session): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.family = socket.AF_UNSPEC # Additionally to mounting our variant of the usual HTTP and HTTPS # adapter, also mount it for some variants of the default schemes that # are limited to some specific address family only adapter = HTTPAdapter() for scheme in ("http", "https"): self.mount("{0}://".format(scheme), adapter) for name in AF2NAME.values(): self.mount("{0}+{1}://".format(scheme, name), adapter) def request(self, method, url, *args, **kwargs): family = kwargs.pop("family", self.family) if family != socket.AF_UNSPEC: # Inject provided address family value as extension to scheme url = urllib.parse.urlparse(url) url = url._replace(scheme="{0}+{1}".format(url.scheme, AF2NAME[int(family)])) url = url.geturl() return super().request(method, url, *args, **kwargs) session = Session # Import other `requests` stuff to make the top-level API of this more compatible from requests import ( __title__, __description__, __url__, __version__, __build__, __author__, __author_email__, __license__, __copyright__, __cake__, exceptions, utils, packages, codes, Request, Response, PreparedRequest, RequestException, Timeout, URLRequired, TooManyRedirects, HTTPError, ConnectionError, FileModeWarning, ConnectTimeout, ReadTimeout ) # Re-implement the top-level “session-less” API def request(method, url, **kwargs): with Session() as session: return session.request(method=method, url=url, **kwargs) def get(url, params=None, **kwargs): kwargs.setdefault('allow_redirects', True) return request('get', url, params=params, **kwargs) def options(url, **kwargs): kwargs.setdefault('allow_redirects', True) return request('options', url, **kwargs) def head(url, **kwargs): kwargs.setdefault('allow_redirects', False) return request('head', url, **kwargs) def post(url, data=None, json=None, **kwargs): return request('post', url, data=data, json=json, **kwargs) def put(url, data=None, **kwargs): return request('put', url, data=data, **kwargs) def patch(url, data=None, **kwargs): return request('patch', url, data=data, **kwargs) def delete(url, **kwargs): return request('delete', url, **kwargs) PK%Q   ipfshttpclient/utils.py"""A module to handle generic operations. """ import mimetypes import os import pathlib import sys import typing as ty from functools import wraps if ty.TYPE_CHECKING: import typing_extensions as ty_ext else: ty_ext = ty if sys.version_info >= (3, 8): #PY38+ Literal = ty_ext.Literal Protocol = ty_ext.Protocol Literal_True = ty.Literal[True] Literal_False = ty.Literal[False] else: #PY37- class Literal(ty.Generic[ty.T]): ... class Protocol: ... Literal_True = Literal_False = bool if sys.version_info >= (3, 6): #PY36+ # `os.PathLike` only has a type param while type checking if ty.TYPE_CHECKING: PathLike = os.PathLike PathLike_str = os.PathLike[str] PathLike_bytes = os.PathLike[bytes] else: class PathLike(Protocol, ty.Generic[ty.AnyStr]): def __fspath__(self) -> ty.AnyStr: ... PathLike_str = PathLike_bytes = os.PathLike path_str_t = ty.Union[str, PathLike_str] path_bytes_t = ty.Union[bytes, PathLike_bytes] path_t = ty.Union[path_str_t, path_bytes_t] AnyPath = ty.TypeVar("AnyPath", str, PathLike_str, bytes, PathLike_bytes) path_types = (str, bytes, os.PathLike,) path_obj_types = (os.PathLike,) @ty.overload def convert_path(path: ty.AnyStr) -> ty.AnyStr: ... @ty.overload def convert_path(path: PathLike_str) -> PathLike_str: ... @ty.overload def convert_path(path: PathLike_bytes) -> PathLike_bytes: ... 
@ty.overload def convert_path(path: AnyPath) -> AnyPath: ... def convert_path(path: AnyPath) -> AnyPath: # Not needed since all system APIs also accept an `os.PathLike` return path else: #PY35 class PathLike(pathlib.PurePath, ty.Generic[ty.AnyStr]): ... path_str_t = ty.Union[str, pathlib.PurePath] path_bytes_t = ty.Union[bytes] path_t = ty.Union[path_str_t, path_bytes_t] AnyPath = ty.TypeVar("AnyPath", str, pathlib.PurePath, bytes) path_types = (str, bytes, pathlib.PurePath,) path_obj_types = (pathlib.PurePath,) # Independently maintained forward-port of `pathlib` for Py27 and others try: import pathlib2 path_types += (pathlib2.PurePath,) path_obj_types += (pathlib2.PurePath,) except ImportError: pass @ty.overload def convert_path(path: path_str_t) -> str: ... @ty.overload def convert_path(path: path_bytes_t) -> bytes: ... def convert_path(path: path_t) -> ty.Union[str, bytes]: # `pathlib`'s PathLike objects need to be treated specially and # converted to strings when interacting with system APIs return str(path) if isinstance(path, path_obj_types) else path # work around GH/mypy/mypy#731: no recursive structural types yet json_primitive_t = ty.Union[bool, float, int, str] json_value_t = ty.Union[ json_primitive_t, "json_list_t", "json_dict_t" ] class json_list_t(ty.List[json_value_t]): pass class json_dict_t(ty.Dict[str, json_value_t]): pass def maybe_fsencode(val: str, ref: ty.AnyStr) -> ty.AnyStr: """Encodes the string *val* using the system filesystem encoding if *ref* is of type :type:`bytes`""" if isinstance(ref, bytes): return os.fsencode(val) else: return val def guess_mimetype(filename: str) -> str: """Guesses the mimetype of a file based on the given ``filename``. .. code-block:: python >>> guess_mimetype('example.txt') 'text/plain' >>> guess_mimetype('/foo/bar/example') 'application/octet-stream' Parameters ---------- filename The file name or path for which the mimetype is to be guessed """ fn = os.path.basename(filename) return mimetypes.guess_type(fn)[0] or 'application/octet-stream' clean_file_t = ty.Union[path_t, ty.IO[bytes], int] def clean_file(file: clean_file_t) -> ty.Tuple[ty.IO[bytes], bool]: """Returns a tuple containing a file-like object and a close indicator This ensures the given file is opened and keeps track of files that should be closed after use (files that were not open prior to this function call). Raises ------ OSError Accessing the given file path failed Parameters ---------- file A filepath or file-like object that may or may not need to be opened """ if isinstance(file, int): return os.fdopen(file, 'rb', closefd=False), True elif not hasattr(file, 'read'): file = ty.cast(path_t, file) # Cannot be ty.IO[bytes] without `.read()` return open(convert_path(file), 'rb'), True else: file = ty.cast(ty.IO[bytes], file) # Must be ty.IO[bytes] return file, False def clean_files(files: ty.Union[clean_file_t, ty.Iterable[clean_file_t]]) \ -> ty.Generator[ty.Tuple[ty.IO[bytes], bool], ty.Any, ty.Any]: """Generates tuples with a file-like object and a close indicator This is a generator of tuples, where the first element is the file object and the second element is a boolean which is True if this module opened the file (and thus should close it). 
Raises ------ OSError Accessing the given file path failed Parameters ---------- files Collection or single instance of a filepath and file-like object """ if not isinstance(files, path_types) and not hasattr(files, "read"): for f in ty.cast(ty.Iterable[clean_file_t], files): yield clean_file(f) else: yield clean_file(ty.cast(clean_file_t, files)) T = ty.TypeVar("T") F = ty.TypeVar("F", bound=ty.Callable[..., ty.Dict[str, T]]) class return_field(ty.Generic[T]): """Decorator that returns the given field of a json response. Parameters ---------- field The response field to be returned for all invocations """ __slots__ = ("field",) #field: str def __init__(self, field: str) -> None: self.field = field # type: str def __call__(self, cmd: F) -> ty.Callable[..., T]: """Wraps a command so that only a specified field is returned. Parameters ---------- cmd A command that is intended to be wrapped """ @wraps(cmd) def wrapper(*args: ty.Any, **kwargs: ty.Any) -> T: """Returns the specified field as returned by the wrapped function Parameters ---------- args Positional parameters to pass to the wrapped callable kwargs Named parameter to pass to the wrapped callable """ res = cmd(*args, **kwargs) # type: ty.Dict[str, T] return res[self.field] return wrapperPK%QG%%ipfshttpclient/version.py# _Versioning scheme:_ # The major and minor version of each release correspond to the supported # IPFS daemon version. The revision number will be updated whenever we make # a new release for the `py-ipfs-http-client` for that daemon version. # # Example: The first client version to support the `0.4.x`-series of the IPFS # HTTP API will have version `0.4.0`, the second version will have version # `0.4.1` and so on. When IPFS `0.5.0` is released, the first client version # to support it will also be released as `0.5.0`. __version__ = "0.6.1" PKnQ4n&n&!ipfshttpclient/client/__init__.py"""IPFS API Bindings for Python. Classes: * Client – a TCP client for interacting with an IPFS daemon """ import os import typing as ty import warnings import multiaddr DEFAULT_ADDR = multiaddr.Multiaddr(os.environ.get("PY_IPFS_HTTP_CLIENT_DEFAULT_ADDR", '/dns/localhost/tcp/5001/http')) DEFAULT_BASE = str(os.environ.get("PY_IPFS_HTTP_CLIENT_DEFAULT_BASE", 'api/v0')) VERSION_MINIMUM = "0.4.22" VERSION_BLACKLIST = [] VERSION_MAXIMUM = "0.7.0" from . import bitswap from . import block from . import bootstrap from . import config from . import dag from . import dht from . import files from . import key from . import miscellaneous from . import name from . import object from . import pin from . import pubsub from . import repo #TODO: `from . import stats` from . import swarm from . import unstable from .. import encoding, exceptions, http, multipart, utils def assert_version(version: str, minimum: str = VERSION_MINIMUM, maximum: str = VERSION_MAXIMUM, blacklist: ty.Iterable[str] = VERSION_BLACKLIST) -> None: """Make sure that the given daemon version is supported by this client version. 
Raises ------ ~ipfshttpclient.exceptions.VersionMismatch Parameters ---------- version The actual version of an IPFS daemon minimum The minimal IPFS daemon version allowed (inclusive) maximum The maximum IPFS daemon version allowed (exclusive) blacklist Versions explicitly disallowed even if in range *minimum* – *maximum* """ # Convert version strings to integer tuples version = list(map(int, version.split('-', 1)[0].split('.'))) minimum = list(map(int, minimum.split('-', 1)[0].split('.'))) maximum = list(map(int, maximum.split('-', 1)[0].split('.'))) if minimum > version or version >= maximum: raise exceptions.VersionMismatch(version, minimum, maximum) for blacklisted in blacklist: blacklisted = list(map(int, blacklisted.split('-', 1)[0].split('.'))) if version == blacklisted: raise exceptions.VersionMismatch(version, minimum, maximum) def connect( addr: http.addr_t = DEFAULT_ADDR, base: str = DEFAULT_BASE, *, chunk_size: int = multipart.default_chunk_size, offline: bool = False, session: bool = False, auth: http.auth_t = None, cookies: http.cookies_t = None, headers: http.headers_t = {}, timeout: http.timeout_t = 120, # Backward-compat username: ty.Optional[str] = None, password: ty.Optional[str] = None ): """Create a new :class:`~ipfshttpclient.Client` instance and connect to the daemon to validate that its version is supported as well as applying any known workarounds for the given daemon version Raises ------ ~ipfshttpclient.exceptions.VersionMismatch ~ipfshttpclient.exceptions.ErrorResponse ~ipfshttpclient.exceptions.ConnectionError ~ipfshttpclient.exceptions.ProtocolError ~ipfshttpclient.exceptions.StatusError ~ipfshttpclient.exceptions.TimeoutError All parameters are identical to those passed to the constructor of the :class:`~ipfshttpclient.Client` class. """ # Create client instance client = Client( addr, base, chunk_size=chunk_size, offline=offline, session=session, auth=auth, cookies=cookies, headers=headers, timeout=timeout, username=username, password=password, ) # Query version number from daemon and validate it assert_version(client.apply_workarounds()["Version"]) return client class Client(files.Base, miscellaneous.Base): """The main IPFS HTTP client class Allows access to an IPFS daemon instance using its HTTP API by exposing an `IPFS Interface Core `__ compatible set of methods. It is possible to instantiate this class directly, using the same parameters as :func:`connect`, to prevent the client from checking for an active and compatible version of the daemon. In general however, calling :func:`connect` should be preferred. In order to reduce latency between individual API calls, this class may keep a pool of TCP connections between this client and the API daemon open between requests. The only caveat of this is that the client object should be closed when it is not used anymore to prevent resource leaks. 
The easiest way of using this “session management” facility is using a context manager:: with ipfshttpclient.connect() as client: print(client.version()) # These calls… print(client.version()) # …will reuse their TCP connection A client object may be re-opened several times:: client = ipfshttpclient.connect() print(client.version()) # Perform API call on separate TCP connection with client: print(client.version()) # These calls… print(client.version()) # …will share a TCP connection with client: print(client.version()) # These calls… print(client.version()) # …will share a different TCP connection When storing a long-running :class:`Client` object use it like this:: class Consumer: def __init__(self): self._client = ipfshttpclient.connect(session=True) # … other code … def close(self): # Call this when you're done self._client.close() """ # Fix up docstring so that Sphinx doesn't ignore the constructors parameter list __doc__ += "\n\n" + "\n".join(l[1:] for l in base.ClientBase.__init__.__doc__.split("\n")) bitswap = base.SectionProperty(bitswap.Section) block = base.SectionProperty(block.Section) bootstrap = base.SectionProperty(bootstrap.Section) config = base.SectionProperty(config.Section) dag = base.SectionProperty(dag.Section) dht = base.SectionProperty(dht.Section) key = base.SectionProperty(key.Section) name = base.SectionProperty(name.Section) object = base.SectionProperty(object.Section) pin = base.SectionProperty(pin.Section) pubsub = base.SectionProperty(pubsub.Section) repo = base.SectionProperty(repo.Section) swarm = base.SectionProperty(swarm.Section) unstable = base.SectionProperty(unstable.Section) ###################### # SESSION MANAGEMENT # ###################### def __enter__(self): self._client.open_session() return self def __exit__(self, exc_type, exc_value, traceback): self.close() def close(self): """Close any currently open client session and free any associated resources. If there was no session currently open this method does nothing. An open session is not a requirement for using a :class:`~ipfshttpclient.Client` object and as such all method defined on it will continue to work, but a new TCP connection will be established for each and every API call invoked. Such a usage should therefor be avoided and may cause a warning in the future. See the class's description for details. """ self._client.close_session() ########### # HELPERS # ########### def apply_workarounds(self): """Query version information of the referenced daemon and enable any workarounds known for the corresponding version Returns ------- dict The version information returned by the daemon """ version_info = self.version() version = tuple(map(int, version_info["Version"].split('-', 1)[0].split('.'))) self._workarounds.clear() if version < (0, 5): # pragma: no cover (workaround) # Not really a workaround, but make use of HEAD requests on versions # that support them to speed things up if we are not interested in the # response anyways self._workarounds.add("use_http_head_for_no_result") return version_info @utils.return_field('Hash') @base.returns_single_item(dict) def add_bytes(self, data: bytes, **kwargs): """Adds a set of bytes as a file to IPFS. .. code-block:: python >>> client.add_bytes(b"Mary had a little lamb") 'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab' Also accepts and will stream generator objects. 
Parameters ---------- data Content to be added as a file Returns ------- str Hash of the added IPFS object """ body, headers = multipart.stream_bytes(data, chunk_size=self.chunk_size) return self._client.request('/add', decoder='json', data=body, headers=headers, **kwargs) @utils.return_field('Hash') @base.returns_single_item(dict) def add_str(self, string, **kwargs): """Adds a Python string as a file to IPFS. .. code-block:: python >>> client.add_str(u"Mary had a little lamb") 'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab' Also accepts and will stream generator objects. Parameters ---------- string : str Content to be added as a file Returns ------- str Hash of the added IPFS object """ body, headers = multipart.stream_text(string, chunk_size=self.chunk_size) return self._client.request('/add', decoder='json', data=body, headers=headers, **kwargs) def add_json(self, json_obj, **kwargs): """Adds a json-serializable Python dict as a json file to IPFS. .. code-block:: python >>> client.add_json({'one': 1, 'two': 2, 'three': 3}) 'QmVz9g7m5u3oHiNKHj2CJX1dbG1gtismRS3g9NaPBBLbob' Parameters ---------- json_obj : dict A json-serializable Python dictionary Returns ------- str Hash of the added IPFS object """ return self.add_bytes(encoding.Json().encode(json_obj), **kwargs) @base.returns_single_item() def get_json(self, cid, **kwargs): """Loads a json object from IPFS. .. code-block:: python >>> client.get_json('QmVz9g7m5u3oHiNKHj2CJX1dbG1gtismRS3g9NaPBBLbob') {'one': 1, 'two': 2, 'three': 3} Parameters ---------- cid : Union[str, cid.CIDv0, cid.CIDv1] CID of the IPFS object to load Returns ------- object Deserialized IPFS JSON object value """ return self.cat(cid, decoder='json', **kwargs)PKP^)j33ipfshttpclient/client/base.pyimport functools import sys import typing as ty import multiaddr # type: ignore[import] from . import DEFAULT_ADDR, DEFAULT_BASE from .. import multipart, http, utils if ty.TYPE_CHECKING: import typing_extensions as ty_ext else: ty_ext = utils if "cid" in sys.modules: import cid # type: ignore[import] cid_t = ty.Union[str, cid.CIDv0, cid.CIDv1] elif not ty.TYPE_CHECKING: cid_t = str multiaddr_t = ty.Union[str, multiaddr.Multiaddr] # Alias JSON types json_dict_t = utils.json_dict_t json_list_t = utils.json_list_t json_primitive_t = utils.json_primitive_t json_value_t = utils.json_value_t # The following would be much more useful once GH/python/mypy#4441 is implemented… if ty.TYPE_CHECKING: # Lame workaround for type checkers CommonArgs = ty.Union[bool, http.auth_t, http.cookies_t, http.reqdata_sync_t, http.headers_t, http.timeout_t] elif hasattr(ty, "TypedDict"): # This is what type checkers should actually use CommonArgs = ty.TypedDict("CommonArgs", { "offline": bool, "return_result": bool, "auth": http.auth_t, "cookies": http.cookies_t, "data": http.reqdata_sync_t, "headers": http.headers_t, "timeout": http.timeout_t, }) else: CommonArgs = ty.Dict[str, ty.Any] # work around GH/mypy/mypy#731: no recursive structural types yet response_item_t = ty.Union[ json_primitive_t, "ResponseBase", "_response_item_list_t" ] class _response_item_list_t(ty.List[response_item_t]): pass class ResponseBase(ty.Mapping[str, response_item_t]): """Base class for wrapping IPFS node API responses Original JSON properties are exposed using dict item syntax ``response[key]``. To access the raw parsed JSON object use the :meth:`as_json` method. 
""" __slots__ = ("_raw",) #_raw: json_dict_t _repr_attr_display = list() # type: ty.Sequence[str] _repr_json_hidden = set() # type: ty.Container[str] def __init__(self, response: json_dict_t): self._raw = response def __getitem__(self, name: str) -> response_item_t: return self._wrap_result(self._raw[name]) @classmethod def _wrap_result(cls, value: json_value_t) -> response_item_t: if isinstance(value, dict): result = ResponseBase(value) # type: response_item_t elif isinstance(value, list): result = _response_item_list_t() # Part of workaround for v in value: result.append(cls._wrap_result(v)) else: result = value return result def __iter__(self) -> ty.Iterator[str]: return iter(self._raw) def __len__(self) -> int: return len(self._raw) def __repr__(self) -> str: attr_str_parts = [] # type: ty.List[str] for name in type(self)._repr_attr_display: attr_str_parts.append("{0}={1!r}".format(name, getattr(self, name))) json_hidden = type(self)._repr_json_hidden attr_json_parts = [] # type: ty.List[str] for name, value in filter(lambda i: i[0] not in json_hidden, self._raw.items()): attr_json_parts.append("{0!r}: {1!r}".format(name, value)) #arg_str: str if attr_str_parts and attr_json_parts: arg_str = "{0}, **{{{1}}}".format(", ".join(attr_str_parts), ", ".join(attr_json_parts)) elif attr_str_parts: arg_str = ", ".join(attr_str_parts) else: arg_str = "{{{0}}}".format(", ".join(attr_json_parts)) return "<{0.__module__}.{0.__qualname__}: {1}>".format(type(self), arg_str) def as_json(self) -> json_dict_t: """Returns the original parsed JSON object as returned by the remote IPFS node In general, try to avoid modifying the returned dictionary if plan on subsequently using this response object. """ return self._raw T = ty.TypeVar("T") R = ty.TypeVar("R") wrap_cb_t = ty.Callable[[T], R] def ident(value: T) -> R: return ty.cast(R, value) class ResponseWrapIterator(ty.Generic[T, R]): __slots__ = ("_inner", "_item_wrap_cb") #_inner: http.StreamDecodeIteratorSync[T] #_item_wrap_cb: wrap_cb_t[T, R] def __init__(self, inner: http.StreamDecodeIteratorSync[T], item_wrap_cb: wrap_cb_t[T, R]): self._inner = inner self._item_wrap_cb = item_wrap_cb def __iter__(self) -> "ResponseWrapIterator[T, R]": return self def __next__(self) -> R: return self._item_wrap_cb(next(self._inner)) def __enter__(self) -> "ResponseWrapIterator[T, R]": self._inner.__enter__() return self def __exit__(self, *args: ty.Any) -> None: self._inner.__exit__(*args) def close(self) -> None: self._inner.close() class _inner_func_t(ty_ext.Protocol, ty.Generic[T]): def __call__(self, *args: ty.Any, **kwargs: ty.Any) \ -> ty.Union[ty.List[T], http.StreamDecodeIteratorSync[T], None]: ... class _returns_multiple_wrapper2_t(ty_ext.Protocol, ty.Generic[T, R]): @ty.overload def __call__(self, *args: ty.Any, stream: bool = ..., return_result: utils.Literal_False, **kwargs: ty.Any) -> None: ... @ty.overload def __call__(self, *args: ty.Any, stream: utils.Literal_True, return_result: utils.Literal_True = ..., **kwargs: ty.Any) -> ty.List[R]: ... @ty.overload def __call__(self, *args: ty.Any, stream: utils.Literal_False = ..., return_result: utils.Literal_True = ..., **kwargs: ty.Any ) -> ResponseWrapIterator[T, R]: ... class _returns_multiple_wrapper1_t(ty_ext.Protocol, ty.Generic[T, R]): def __call__(self, func: _inner_func_t[T]) -> _returns_multiple_wrapper2_t[T, R]: ... 
def returns_multiple_items(item_wrap_cb: wrap_cb_t[T, R] = ident, *, stream: bool = False) \ -> _returns_multiple_wrapper1_t[T, R]: def wrapper1(func: _inner_func_t[T]) -> _returns_multiple_wrapper2_t[T, R]: @functools.wraps(func) def wrapper2(*args: ty.Any, **kwargs: ty.Any) \ -> ty.Union[None, ty.List[R], ResponseWrapIterator[T, R]]: result = func(*args, **kwargs) if isinstance(result, list): return [item_wrap_cb(r) for r in result] if result is None: # WORKAROUND: Remove the `or …` part in 0.7.X assert not kwargs.get("return_result", True) or kwargs.get("quiet", False) return None assert kwargs.get("stream", False) or stream, ( "Called IPFS HTTP-Client function should only ever return a list, " "when not streaming a response" ) return ResponseWrapIterator(result, item_wrap_cb) return wrapper2 return wrapper1 class _returns_single_wrapper2_t(ty_ext.Protocol, ty.Generic[T, R]): @ty.overload def __call__(self, *args: ty.Any, stream: bool = ..., return_result: utils.Literal_False, **kwargs: ty.Any) -> None: ... @ty.overload def __call__(self, *args: ty.Any, stream: utils.Literal_True, return_result: utils.Literal_True = ..., **kwargs: ty.Any) -> R: ... @ty.overload def __call__(self, *args: ty.Any, stream: utils.Literal_False = ..., return_result: utils.Literal_True = ..., **kwargs: ty.Any ) -> ResponseWrapIterator[T, R]: ... class _returns_single_wrapper1_t(ty_ext.Protocol, ty.Generic[T, R]): def __call__(self, func: _inner_func_t[T]) -> _returns_single_wrapper2_t[T, R]: ... def returns_single_item(item_wrap_cb: wrap_cb_t[T, R] = ident, *, stream: bool = False) \ -> _returns_single_wrapper1_t[T, R]: def wrapper1(func: _inner_func_t[T]) -> _returns_single_wrapper2_t[T, R]: @functools.wraps(func) def wrapper2(*args: ty.Any, **kwargs: ty.Any) \ -> ty.Union[None, R, ResponseWrapIterator[T, R]]: result = func(*args, **kwargs) if isinstance(result, list): assert len(result) == 1, ("Called IPFS HTTP-Client function should " "only ever return one item") return item_wrap_cb(result[0]) if result is None: assert not kwargs.get("return_result", True) return None assert kwargs.get("stream", False) or stream, ( "Called IPFS HTTP-Client function should only ever return a list " "with a single item, when not streaming a response" ) return ResponseWrapIterator(result, item_wrap_cb) return wrapper2 return wrapper1 class _returns_single_wrapper_t(ty_ext.Protocol): @ty.overload def __call__(self, *args: ty.Any, stream: utils.Literal_True, **kwargs: ty.Any) \ -> ResponseWrapIterator[None, None]: ... @ty.overload def __call__(self, *args: ty.Any, stream: utils.Literal_False = ..., **kwargs: ty.Any) -> None: ... def returns_no_item(func: _inner_func_t[ty.NoReturn]) -> _returns_single_wrapper_t: @functools.wraps(func) def wrapper(*args: ty.Any, **kwargs: ty.Any) \ -> ty.Union[None, ResponseWrapIterator[None, None]]: result = func(*args, **kwargs) if isinstance(result, (list, bytes, object)): assert not result, ("Called IPFS HTTP-Client function should never " "return a non-empty item") return None assert kwargs.get("stream", False), ( # type: ignore[unreachable] "Called IPFS HTTP-Client function should only ever return an empty " "object, when not streaming a response" ) return ResponseWrapIterator(result, ident) return wrapper S = ty.TypeVar("S", bound="SectionBase") class SectionProperty(ty.Generic[S]): def __init__(self, cls: ty.Type[S]): self.__prop_cls__ = cls @ty.overload def __get__(self, client_object: "ClientBase", type: None = None) -> S: ... 
@ty.overload def __get__(self, client_object: None, type: ty.Type["ClientBase"]) -> ty.Type[S]: ... def __get__( self, client_object: ty.Optional["ClientBase"], type: ty.Optional[ty.Type["ClientBase"]] = None ) -> ty.Union[ty.Type[S], S]: if client_object is not None: # We are invoked on object try: return client_object.__prop_objs__[self] # type: ignore except AttributeError: client_object.__prop_objs__ = { # type: ignore self: self.__prop_cls__(client_object) } return client_object.__prop_objs__[self] # type: ignore except KeyError: client_object.__prop_objs__[self] = self.__prop_cls__(client_object) # type: ignore return client_object.__prop_objs__[self] # type: ignore else: # We are invoked on the class return self.__prop_cls__ class SectionBase: # Accept parent object from property descriptor def __init__(self, parent: "ClientBase") -> None: self.__parent = parent # Proxy the parent's properties @property def _client(self) -> http.ClientSync: return self.__parent._client @property def chunk_size(self) -> int: return self.__parent.chunk_size @chunk_size.setter def chunk_size(self, value: int) -> None: self.__parent.chunk_size = value class ClientBase: def __init__( # type: ignore[no-any-unimported] self, addr: http.addr_t = DEFAULT_ADDR, base: str = DEFAULT_BASE, *, chunk_size: int = multipart.default_chunk_size, offline: bool = False, session: bool = False, auth: http.auth_t = None, cookies: http.cookies_t = None, headers: http.headers_t = {}, # type: ignore[assignment] # False positive timeout: http.timeout_t = 120, # Backward-compat username: ty.Optional[str] = None, password: ty.Optional[str] = None ): """ Arguments --------- addr The `Multiaddr `_ describing the API daemon location, as used in the *API* key of `go-ipfs Addresses section `_ Supported addressing patterns are currently: * ``/{dns,dns4,dns6,ip4,ip6}//tcp/`` (HTTP) * ``/{dns,dns4,dns6,ip4,ip6}//tcp//http`` (HTTP) * ``/{dns,dns4,dns6,ip4,ip6}//tcp//https`` (HTTPS) Additional forms (proxying) may be supported in the future. base The HTTP URL path prefix (or “base”) at which the API is exposed on the API daemon chunk_size The size of data chunks passed to the operating system when uploading files or text/binary content offline Ask daemon to operate in “offline mode” – that is, it should not consult the network when unable to find resources locally, but fail instead session Create this :class:`~ipfshttpclient.Client` instance with a session already open? (Useful for long-running client objects.) auth HTTP basic authentication `(username, password)` tuple to send along with each request to the API daemon cookies HTTP cookies to send along with each request to the API daemon headers Custom HTTP headers to send along with each request to the API daemon timeout Connection timeout (in seconds) when connecting to the API daemon If a tuple is passed its contents will be interpreted as the values for the connecting and receiving phases respectively, otherwise the value will apply to both phases. The default value is implementation-defined. A value of `math.inf` disables the respective timeout. """ self.chunk_size = chunk_size if auth is None and (username or password): assert username and password auth = (username, password) self._client = http.ClientSync( addr, base, offline=offline, auth=auth, cookies=cookies, headers=headers, timeout=timeout, ) if session: self._client.open_session() self._workarounds = self._client.workarounds PKyP(1k ipfshttpclient/client/bitswap.pyimport typing as ty from . 
import base class Section(base.SectionBase): @base.returns_single_item(base.ResponseBase) def wantlist(self, peer: ty.Optional[str] = None, **kwargs: base.CommonArgs): """Returns blocks currently on the bitswap wantlist .. code-block:: python >>> client.bitswap.wantlist() {'Keys': [ 'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb', 'QmdCWFLDXqgdWQY9kVubbEHBbkieKd3uo7MtCm7nTZZE9K', 'QmVQ1XvYGF19X4eJqz1s7FJYJqAxFC4oqh3vWJJEXn66cp' ]} Parameters ---------- peer Peer to show wantlist for Returns ------- dict +------+----------------------------------------------------+ | Keys | List of blocks the connected daemon is looking for | +------+----------------------------------------------------+ """ args = (peer,) return self._client.request('/bitswap/wantlist', args, decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def stat(self, **kwargs: base.CommonArgs): """Returns some diagnostic information from the bitswap agent .. code-block:: python >>> client.bitswap.stat() {'BlocksReceived': 96, 'DupBlksReceived': 73, 'DupDataReceived': 2560601, 'ProviderBufLen': 0, 'Peers': [ 'QmNZFQRxt9RMNm2VVtuV2Qx7q69bcMWRVXmr5CEkJEgJJP', 'QmNfCubGpwYZAQxX8LQDsYgB48C4GbfZHuYdexpX9mbNyT', 'QmNfnZ8SCs3jAtNPc8kf3WJqJqSoX7wsX7VqkLdEYMao4u', … ], 'Wantlist': [ 'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb', 'QmdCWFLDXqgdWQY9kVubbEHBbkieKd3uo7MtCm7nTZZE9K', 'QmVQ1XvYGF19X4eJqz1s7FJYJqAxFC4oqh3vWJJEXn66cp' ] } Returns ------- dict Statistics, peers and wanted blocks """ return self._client.request('/bitswap/stat', decoder='json', **kwargs)PKPipfshttpclient/client/block.pyfrom . import base from .. import multipart from .. import utils class Section(base.SectionBase): """Interacting with raw IPFS blocks""" def get(self, cid: base.cid_t, **kwargs: base.CommonArgs): r"""Returns the raw contents of a block .. code-block:: python >>> client.block.get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') b'\x121\n"\x12 \xdaW>\x14\xe5\xc1\xf6\xe4\x92\xd1 … \n\x02\x08\x01' Parameters ---------- cid The CID of an existing block to get Returns ------- bytes Contents of the requested block """ args = (str(cid),) return self._client.request('/block/get', args, **kwargs) @base.returns_single_item(base.ResponseBase) def put(self, file: utils.clean_file_t, **kwargs: base.CommonArgs): """Stores the contents of the given file object as an IPFS block .. code-block:: python >>> client.block.put(io.BytesIO(b'Mary had a little lamb')) {'Key': 'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb', 'Size': 22} Parameters ---------- file The data to be stored as an IPFS block Returns ------- dict Information about the new block See :meth:`~ipfshttpclient.Client.block.stat` """ body, headers = multipart.stream_files(file, chunk_size=self.chunk_size) return self._client.request('/block/put', decoder='json', data=body, headers=headers, **kwargs) @base.returns_single_item(base.ResponseBase) def stat(self, cid: base.cid_t, **kwargs: base.CommonArgs): """Returns a dict with the size of the block with the given hash. .. code-block:: python >>> client.block.stat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Key': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Size': 258} Parameters ---------- cid The CID of an existing block to stat Returns ------- dict Information about the requested block """ args = (str(cid),) return self._client.request('/block/stat', args, decoder='json', **kwargs)PKPrD"ipfshttpclient/client/bootstrap.pyfrom . 
import base class Section(base.SectionBase): @base.returns_single_item(base.ResponseBase) def add(self, peer: base.multiaddr_t, *peers: base.multiaddr_t, **kwargs: base.CommonArgs): """Adds peers to the bootstrap list Parameters ---------- peer IPFS Multiaddr of a peer to add to the list Returns ------- dict """ args = (str(peer), *(str(p) for p in peers)) return self._client.request('/bootstrap/add', args, decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def list(self, **kwargs: base.CommonArgs): """Returns the addresses of peers used during initial discovery of the IPFS network Peers are output in the format ``/``. .. code-block:: python >>> client.bootstrap.list() {'Peers': [ '/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYER … uvuJ', '/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRa … ca9z', '/ip4/104.236.179.241/tcp/4001/ipfs/QmSoLPppuBtQSGwKD … KrGM', … '/ip4/178.62.61.185/tcp/4001/ipfs/QmSoLMeWqB7YGVLJN3p … QBU3' ]} Returns ------- dict +-------+-------------------------------+ | Peers | List of known bootstrap peers | +-------+-------------------------------+ """ return self._client.request('/bootstrap', decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def rm(self, peer: base.multiaddr_t, *peers: base.multiaddr_t, **kwargs: base.CommonArgs): """Removes peers from the bootstrap list Parameters ---------- peer IPFS Multiaddr of a peer to remove from the list Returns ------- dict """ args = (str(peer), *(str(p) for p in peers)) return self._client.request('/bootstrap/rm', args, decoder='json', **kwargs) PKP.ooipfshttpclient/client/config.pyfrom . import base from .. import utils class Section(base.SectionBase): @base.returns_single_item(base.ResponseBase) def get(self, **kwargs: base.CommonArgs): #TODO: Support the optional `key` parameter """Returns the currently used node configuration .. code-block:: python >>> config = client.config.get() >>> config['Addresses'] {'API': '/ip4/127.0.0.1/tcp/5001', 'Gateway': '/ip4/127.0.0.1/tcp/8080', 'Swarm': ['/ip4/0.0.0.0/tcp/4001', '/ip6/::/tcp/4001']}, >>> config['Discovery'] {'MDNS': {'Enabled': True, 'Interval': 10}} Returns ------- dict The entire IPFS daemon configuration """ return self._client.request('/config/show', decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def replace(self, config: utils.json_dict_t, **kwargs: base.CommonArgs): """Replaces the existing configuration with a new configuration tree Make sure to back up the config file first if neccessary, as this operation can not be undone. """ return self._client.request('/config/replace', (config,), decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def set(self, key: str, value: utils.json_value_t = None, **kwargs: base.CommonArgs): """Adds or replaces a single configuration value .. code-block:: python >>> client.config.set("Addresses.Gateway") {'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8080'} >>> client.config.set("Addresses.Gateway", "/ip4/127.0.0.1/tcp/8081") {'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8081'} Parameters ---------- key The key of the configuration entry (e.g. 
"Addresses.API") value The value to set the configuration entry to Returns ------- dict +-------+---------------------------------------------+ | Key | The requested configuration key | +-------+---------------------------------------------+ | Value | The new value of the this configuration key | +-------+---------------------------------------------+ """ args = (key, value) return self._client.request('/config', args, decoder='json', **kwargs)PKnQ\?ipfshttpclient/client/dag.pyfrom . import base from .. import multipart from .. import utils class Section(base.SectionBase): @base.returns_single_item(base.ResponseBase) def get(self, cid: base.cid_t, **kwargs: base.CommonArgs): """Retrieves the contents of a DAG node .. code-block:: python >>> client.dag.get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Data': '\x08\x01', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174}, {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', 'Name': 'example', 'Size': 1474}, {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', 'Name': 'home', 'Size': 3947}, {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', 'Name': 'lib', 'Size': 268261}, {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55} ]} Parameters ---------- cid Key of the object to retrieve, in CID format Returns ------- dict Cid with the address of the dag object """ args = (str(cid),) return self._client.request('/dag/get', args, decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def put(self, data: utils.clean_file_t, **kwargs: base.CommonArgs): """Decodes the given input file as a DAG object and returns their key .. code-block:: python >>> client.dag.put(io.BytesIO(b''' ... { ... "Data": "another", ... "Links": [ { ... "Name": "some link", ... "Hash": "QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCV … R39V", ... "Size": 8 ... } ] ... }''')) {'Cid': { '/': 'bafyreifgjgbmtykld2e3yncey3naek5xad3h4m2pxmo3of376qxh54qk34' } } Parameters ---------- data IO stream object of path to a file containing the data to put Returns ------- dict Cid with the address of the dag object """ body, headers = multipart.stream_files(data, chunk_size=self.chunk_size) return self._client.request('/dag/put', decoder='json', data=body, headers=headers, **kwargs) @base.returns_single_item(base.ResponseBase) def resolve(self, cid: base.cid_t, **kwargs: base.CommonArgs): """Resolves a DAG node from its CID, returning its address and remaining path .. code-block:: python >>> client.dag.resolve('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Cid': { '/': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D' } } Parameters ---------- cid Key of the object to resolve, in CID format Returns ------- dict Cid with the address of the dag object """ args = (str(cid),) return self._client.request('/dag/resolve', args, decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def imprt(self, data: utils.clean_file_t, **kwargs: base.CommonArgs): """Imports a .car file with a DAG into IPFS .. code-block:: python >>> with open('data.car', 'rb') as file ... client.dag.imprt(file) {'Root': { 'Cid': { '/': 'bafyreidepjmjhvhlvp5eyxqpmyyi7rxwvl7wsglwai3cnvq63komq4tdya' } } } *Note*: This method is named ``.imprt`` (rather than ``.import``) to avoid causing a Python :exc:`SyntaxError` due to ``import`` being global keyword in Python. 
Parameters ---------- data IO stream object with data that should be imported Returns ------- dict Dictionary with the root CID of the DAG imported """ body, headers = multipart.stream_files(data, chunk_size=self.chunk_size) return self._client.request('/dag/import', decoder='json', data=body, headers=headers, **kwargs) def export(self, cid: str, **kwargs: base.CommonArgs): """Exports a DAG into a .car file format .. code-block:: python >>> data = client.dag.export('bafyreidepjmjhvhlvp5eyxqpmyyi7rxwvl7wsglwai3cnvq63komq4tdya') *Note*: When exporting larger DAG structures, remember that you can set the *stream* parameter to ``True`` on any method to have it return results incrementally. Parameters ---------- cid Key of the object to export, in CID format Returns ------- bytes DAG in a .car format """ args = (str(cid),) return self._client.request('/dag/export', args, **kwargs) PKPf̜ɖipfshttpclient/client/dht.pyfrom . import base from .. import exceptions class Section(base.SectionBase): @base.returns_single_item(base.ResponseBase) def findpeer(self, peer_id: str, *peer_ids: str, **kwargs: base.CommonArgs): """Queries the DHT for all of the associated multiaddresses .. code-block:: python >>> client.dht.findpeer("QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZN … MTLZ") [{'ID': 'QmfVGMFrwW6AV6fTWmD6eocaTybffqAvkVLXQEFrYdk6yc', 'Extra': '', 'Type': 6, 'Responses': None}, {'ID': 'QmTKiUdjbRjeN9yPhNhG1X38YNuBdjeiV9JXYWzCAJ4mj5', 'Extra': '', 'Type': 6, 'Responses': None}, {'ID': 'QmTGkgHSsULk8p3AKTAqKixxidZQXFyF7mCURcutPqrwjQ', 'Extra': '', 'Type': 6, 'Responses': None}, … {'ID': '', 'Extra': '', 'Type': 2, 'Responses': [ {'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ', 'Addrs': [ '/ip4/10.9.8.1/tcp/4001', '/ip6/::1/tcp/4001', '/ip4/164.132.197.107/tcp/4001', '/ip4/127.0.0.1/tcp/4001']} ]}] Parameters ---------- peer_id The ID of the peer to search for Returns ------- dict List of multiaddrs """ args = (peer_id,) + peer_ids return self._client.request('/dht/findpeer', args, decoder='json', **kwargs) @base.returns_multiple_items(base.ResponseBase) def findprovs(self, cid: base.cid_t, *cids: base.cid_t, **kwargs: base.CommonArgs): """Finds peers in the DHT that can provide a specific value .. code-block:: python >>> client.dht.findprovs("QmNPXDC6wTXVmZ9Uoc8X1oqxRRJr4f1sDuyQu … mpW2") [{'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ', 'Extra': '', 'Type': 6, 'Responses': None}, {'ID': 'QmaK6Aj5WXkfnWGoWq7V8pGUYzcHPZp4jKQ5JtmRvSzQGk', 'Extra': '', 'Type': 6, 'Responses': None}, {'ID': 'QmdUdLu8dNvr4MVW1iWXxKoQrbG6y1vAVWPdkeGK4xppds', 'Extra': '', 'Type': 6, 'Responses': None}, … {'ID': '', 'Extra': '', 'Type': 4, 'Responses': [ {'ID': 'QmVgNoP89mzpgEAAqK8owYoDEyB97Mk … E9Uc', 'Addrs': None} ]}, {'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ', 'Extra': '', 'Type': 1, 'Responses': [ {'ID': 'QmSHXfsmN3ZduwFDjeqBn1C8b1tcLkxK6yd … waXw', 'Addrs': [ '/ip4/127.0.0.1/tcp/4001', '/ip4/172.17.0.8/tcp/4001', '/ip6/::1/tcp/4001', '/ip4/52.32.109.74/tcp/1028' ]} ]}] Parameters ---------- cid The DHT key to find providers for Returns ------- dict List of provider Peer IDs """ args = (str(cid),) + tuple(str(c) for c in cids) return self._client.request('/dht/findprovs', args, decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def get(self, key: str, *keys: str, **kwargs: base.CommonArgs): """Queries the DHT for its best value related to given key There may be several different values for a given key stored in the DHT; in this context *best* means the record that is most desirable. 
There is no one metric for *best*: it depends entirely on the key type. For IPNS, *best* is the record that is both valid and has the highest sequence number (freshest). Different key types may specify other rules for what they consider to be the *best*. Parameters ---------- key One or more keys whose values should be looked up Returns ------- str """ args = (key,) + keys res = self._client.request('/dht/get', args, decoder='json', **kwargs) if isinstance(res, dict) and "Extra" in res: return res["Extra"] else: for r in res: if "Extra" in r and len(r["Extra"]) > 0: return r["Extra"] raise exceptions.Error("empty response from DHT") #TODO: Implement `provide(cid)` @base.returns_multiple_items(base.ResponseBase) def put(self, key: str, value: str, **kwargs: base.CommonArgs): """Writes a key/value pair to the DHT Given a key of the form ``/foo/bar`` and a value of any form, this will write that value to the DHT with that key. Keys have two parts: a keytype (foo) and the key name (bar). IPNS uses the ``/ipns/`` keytype, and expects the key name to be a Peer ID. IPNS entries are formatted with a special strucutre. You may only use keytypes that are supported in your ``ipfs`` binary: ``go-ipfs`` currently only supports the ``/ipns/`` keytype. Unless you have a relatively deep understanding of the key's internal structure, you likely want to be using the :meth:`~ipfshttpclient.Client.name_publish` instead. Value is arbitrary text. .. code-block:: python >>> client.dht.put("QmVgNoP89mzpgEAAqK8owYoDEyB97Mkc … E9Uc", "test123") [{'ID': 'QmfLy2aqbhU1RqZnGQyqHSovV8tDufLUaPfN1LNtg5CvDZ', 'Extra': '', 'Type': 5, 'Responses': None}, {'ID': 'QmZ5qTkNvvZ5eFq9T4dcCEK7kX8L7iysYEpvQmij9vokGE', 'Extra': '', 'Type': 5, 'Responses': None}, {'ID': 'QmYqa6QHCbe6eKiiW6YoThU5yBy8c3eQzpiuW22SgVWSB8', 'Extra': '', 'Type': 6, 'Responses': None}, … {'ID': 'QmP6TAKVDCziLmx9NV8QGekwtf7ZMuJnmbeHMjcfoZbRMd', 'Extra': '', 'Type': 1, 'Responses': []}] Parameters ---------- key A unique identifier value Abitrary text to associate with the input (2048 bytes or less) Returns ------- list """ args = (key, value) return self._client.request('/dht/put', args, decoder='json', **kwargs) @base.returns_multiple_items(base.ResponseBase) def query(self, peer_id: str, *peer_ids: str, **kwargs: base.CommonArgs): """Finds the closest Peer IDs to a given Peer ID by querying the DHT. .. code-block:: python >>> client.dht.query("/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDM … uvuJ") [{'ID': 'QmPkFbxAQ7DeKD5VGSh9HQrdS574pyNzDmxJeGrRJxoucF', 'Extra': '', 'Type': 2, 'Responses': None}, {'ID': 'QmR1MhHVLJSLt9ZthsNNhudb1ny1WdhY4FPW21ZYFWec4f', 'Extra': '', 'Type': 2, 'Responses': None}, {'ID': 'Qmcwx1K5aVme45ab6NYWb52K2TFBeABgCLccC7ntUeDsAs', 'Extra': '', 'Type': 2, 'Responses': None}, … {'ID': 'QmYYy8L3YD1nsF4xtt4xmsc14yqvAAnKksjo3F3iZs5jPv', 'Extra': '', 'Type': 1, 'Responses': []}] Parameters ---------- peer_id The peerID to run the query against Returns ------- dict List of peers IDs """ args = (peer_id,) + peer_ids return self._client.request('/dht/query', args, decoder='json', **kwargs)PKP$V699ipfshttpclient/client/files.pyimport typing as ty from . import base from .. import multipart from .. import utils class Section(base.SectionBase): """Manage files in IPFS's virtual “Mutable File System” (MFS) file storage space""" @base.returns_no_item def cp(self, source: str, dest: str, **kwargs: base.CommonArgs): """Creates a copy of a file within the MFS Due to the nature of IPFS this will not actually involve any copying of the file's content. 
Instead, a new link will be added to the directory containing *dest* referencing the CID of *source* – this is very similar to how hard links to read-only files work in classical filesystems. .. code-block:: python >>> client.files.ls("/") {'Entries': [ {'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0}, {'Size': 0, 'Hash': '', 'Name': 'test', 'Type': 0} ]} >>> client.files.cp("/test", "/bla") >>> client.files.ls("/") {'Entries': [ {'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0}, {'Size': 0, 'Hash': '', 'Name': 'bla', 'Type': 0}, {'Size': 0, 'Hash': '', 'Name': 'test', 'Type': 0} ]} Parameters ---------- source Filepath within the MFS to copy from dest Destination filepath within the MFS to which the file will be copied/linked to """ args = (source, dest) return self._client.request('/files/cp', args, **kwargs) #TODO: Add `flush(path="/")` @base.returns_single_item(base.ResponseBase) def ls(self, path: str, **kwargs: base.CommonArgs): """Lists contents of a directory in the MFS .. code-block:: python >>> client.files.ls("/") {'Entries': [ {'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0} ]} Parameters ---------- path Filepath within the MFS Returns ------- dict +---------+------------------------------------------+ | Entries | List of files in the given MFS directory | +---------+------------------------------------------+ """ args = (path,) return self._client.request('/files/ls', args, decoder='json', **kwargs) @base.returns_no_item def mkdir(self, path: str, parents: bool = False, **kwargs: base.CommonArgs): """Creates a directory within the MFS .. code-block:: python >>> client.files.mkdir("/test") Parameters ---------- path Filepath within the MFS parents Create parent directories as needed and do not raise an exception if the requested directory already exists """ kwargs.setdefault("opts", {})["parents"] = parents args = (path,) return self._client.request('/files/mkdir', args, **kwargs) @base.returns_no_item def mv(self, source: str, dest: str, **kwargs: base.CommonArgs): """Moves files and directories within the MFS .. code-block:: python >>> client.files.mv("/test/file", "/bla/file") Parameters ---------- source Existing filepath within the MFS dest Destination to which the file will be moved in the MFS """ args = (source, dest) return self._client.request('/files/mv', args, **kwargs) def read(self, path: str, offset: int = 0, count: ty.Optional[int] = None, **kwargs: base.CommonArgs): """Reads a file stored in the MFS .. code-block:: python >>> client.files.read("/bla/file") b'hi' Parameters ---------- path Filepath within the MFS offset Byte offset at which to begin reading at count Maximum number of bytes to read (default is the entire remaining length) Returns ------- bytes : MFS file contents """ opts = {"offset": offset} if count is not None: opts["count"] = count kwargs.setdefault("opts", {}).update(opts) args = (path,) return self._client.request('/files/read', args, **kwargs) @base.returns_no_item def rm(self, path: str, recursive: bool = False, **kwargs: base.CommonArgs): """Removes a file from the MFS Note that the file's contents will not actually be removed from the IPFS node until the next repository GC run. If it is important to have the file's contents erased from the node this may be done manually by calling :meth`~ipfshttpclient.Client.repo.gc` at a time of convenience. .. code-block:: python >>> client.files.rm("/bla/file") Parameters ---------- path Filepath within the MFS recursive Recursively remove directories? 
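For example (illustrative sketch – the directory path is taken from the
examples above), removing a non-empty MFS directory requires *recursive*:

.. code-block:: python

    >>> client.files.rm("/bla", recursive=True)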
""" kwargs.setdefault("opts", {})["recursive"] = recursive args = (path,) return self._client.request('/files/rm', args, **kwargs) @base.returns_single_item(base.ResponseBase) def stat(self, path: str, **kwargs: base.CommonArgs): """Returns basic ``stat`` information for an MFS file (including its hash) .. code-block:: python >>> client.files.stat("/test") {'Hash': 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn', 'Size': 0, 'CumulativeSize': 4, 'Type': 'directory', 'Blocks': 0} Parameters ---------- path Filepath within the MFS Returns ------- dict : MFS file information """ args = (path,) return self._client.request('/files/stat', args, decoder='json', **kwargs) @base.returns_no_item def write(self, path: str, file: utils.clean_file_t, offset: int = 0, create: bool = False, truncate: bool = False, count: ty.Optional[int] = None, **kwargs: base.CommonArgs): """Writes a file into the MFS .. code-block:: python >>> client.files.write("/test/file", io.BytesIO(b"hi"), create=True) Parameters ---------- path Filepath within the MFS file IO stream object with data that should be written offset Byte offset at which to begin writing at create Create the file if it does not exist truncate Truncate the file to size zero before writing count Maximum number of bytes to read from the source ``file`` """ opts = {"offset": offset, "create": create, "truncate": truncate} if count is not None: opts["count"] = count kwargs.setdefault("opts", {}).update(opts) args = (path,) body, headers = multipart.stream_files(file, chunk_size=self.chunk_size) return self._client.request('/files/write', args, data=body, headers=headers, **kwargs) class Base(base.ClientBase): files = base.SectionProperty(Section) def add(self, file: utils.clean_file_t, *files: utils.clean_file_t, recursive: bool = False, pattern: multipart.match_spec_t[ty.AnyStr] = None, trickle: bool = False, follow_symlinks: bool = False, period_special: bool = True, only_hash: bool = False, wrap_with_directory: bool = False, chunker: ty.Optional[str] = None, pin: bool = True, raw_leaves: bool = None, nocopy: bool = False, cid_version: ty.Optional[int] = None, **kwargs: base.CommonArgs): """Adds a file, several files or directory of files to IPFS Arguments marked as “directories only” will be ignored unless *file* refers to a directory path or file descriptor. Passing a directory file descriptor is currently restricted to Unix (due to Python standard library limitations on Windows) and will prevent the *nocopy* feature from working. .. code-block:: python >>> with io.open('nurseryrhyme.txt', 'w', encoding='utf-8') as f: ... numbytes = f.write('Mary had a little lamb') >>> client.add('nurseryrhyme.txt') {'Hash': 'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab', 'Name': 'nurseryrhyme.txt'} Directory uploads ----------------- By default only regular files and directories immediately below the given directory path/FD are uploaded to the connected IPFS node; to upload an entire directory tree instead, *recursive* can be set to ``True``. Symbolic links and special files (pipes, sockets, devices nodes, …) cannot be represented by the UnixFS data structure this call creates and hence are ignored while scanning the target directory, to include the targets of symbolic links in the upload set *follow_symlinks* to ``True``. The set of files and directories included in the upload may be restricted by passing any combination of glob matching strings, compiled regular expression objects and custom :class:`~ipfshttpclient.filescanner.Matcher` objects. 
A file or directory will be included if it matches of the patterns provided. For regular expressions please note that as predicting which directories are relevant to the given pattern is impossible to do reliably if *recursive* is set to ``True`` the entire directory hierarchy will always be scanned and compared to the given expression even if only very few files are actually matched by the expression. To avoid this, pass a custom matching class or use glob-patterns instead (which will only cause a scan of the directories required to match their value). Note that unlike the ``ipfs add`` CLI interface this implementation will be default include dot-files (“files that are hidden”) – any file or directory whose name starts with a period/dot character – in the upload. For behaviour that is similar to the CLI command set *pattern* to ``"**"`` – this enables the default glob behaviour of not matching dot-files unless *period_special* is set to ``False`` or the pattern actually starts with a period. Arguments --------- file A filepath, path-object, file descriptor or open file object the file or directory to add recursive Upload files in subdirectories, if *file* refers to a directory? pattern A :mod:`glob` pattern, compiled regular expression object or arbitrary matcher used to limit the files and directories included as part of adding a directory (directories only) trickle Use trickle-dag format (optimized for streaming) when generating the dag; see `the old FAQ `_ for more information follow_symlinks Follow symbolic links when recursively scanning directories? (directories only) period_special Treat files and directories with a leading period character (“dot-files”) specially in glob patterns? (directories only) If this is set these files will only be matched by path labels whose initial character is a period, but not by those starting with ``?``, ``*`` or ``[``. only_hash Only chunk and hash, but do not write to disk wrap_with_directory Wrap files with a directory object to preserve their filename chunker The chunking algorithm to use pin Pin this object when adding raw_leaves Use raw blocks for leaf nodes. (experimental). (Default: ``True`` when *nocopy* is True, or ``False`` otherwise) nocopy Add the file using filestore. Implies raw-leaves. (experimental). cid_version CID version. Default value is provided by IPFS daemon. 
(experimental) Returns ------- Union[dict, list] File name and hash of the added file node, will return a list of one or more items unless only a single file (not directory) was given """ opts = { "trickle": trickle, "only-hash": only_hash, "wrap-with-directory": wrap_with_directory, "pin": pin, "raw-leaves": raw_leaves if raw_leaves is not None else nocopy, "nocopy": nocopy } # type: ty.Dict[str, ty.Union[str, bool]] for option_name, option_value in [ ("chunker", chunker), ("cid-version", cid_version), ]: if option_value is not None: opts[option_name] = option_value kwargs.setdefault("opts", {}).update(opts) # There may be other cases where nocopy will silently fail to work, but # this is by far the most obvious one if isinstance(file, int) and nocopy: raise ValueError("Passing file descriptors is incompatible with *nocopy*") assert not isinstance(file, (tuple, list)), \ "Use `client.add(name1, name2, …)` to add several items" multiple = (len(files) > 0) to_send = ((file,) + files) if multiple else file body, headers, is_dir = multipart.stream_filesystem_node( to_send, chunk_size=self.chunk_size, follow_symlinks=follow_symlinks, period_special=period_special, patterns=pattern, recursive=recursive ) resp = self._client.request('/add', decoder='json', data=body, headers=headers, **kwargs) if not multiple and not is_dir and not wrap_with_directory: assert len(resp) == 1 return base.ResponseBase(resp[0]) elif kwargs.get("stream", False): return base.ResponseWrapIterator(resp, base.ResponseBase) return [base.ResponseBase(v) for v in resp] @base.returns_no_item def get(self, cid: base.cid_t, target: utils.path_t = ".", **kwargs: base.CommonArgs) -> None: """Downloads a file, or directory of files from IPFS Parameters ---------- cid The path to the IPFS object(s) to be outputted target The directory to place the downloaded files in Defaults to the current working directory. """ args = (str(cid),) return self._client.download('/get', target, args, **kwargs) def cat(self, cid: base.cid_t, offset: int = 0, length: ty.Optional[int] = None, **kwargs: base.CommonArgs): r"""Retrieves the contents of a file identified by hash .. code-block:: python >>> client.cat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') Traceback (most recent call last): ... ipfsapi.exceptions.Error: this dag node is a directory >>> client.cat('QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX') b'\n\n\n\nipfs example viewer</…' Parameters ---------- cid The name or path of the IPFS object(s) to be retrieved offset Byte offset to begin reading from length Maximum number of bytes to read (defaults to reading the entire file) Returns ------- bytes The file's contents """ args = (str(cid),) opts = {} if offset != 0: opts['offset'] = offset if length is not None: opts['length'] = length kwargs.setdefault('opts', opts) return self._client.request('/cat', args, **kwargs) @base.returns_single_item(base.ResponseBase) def ls(self, cid: base.cid_t, **kwargs: base.CommonArgs): """Returns a list of objects linked to by the given hash .. 
code-block:: python >>> client.ls('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Objects': [ {'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174, 'Type': 2}, … {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55, 'Type': 2} ] } ]} Parameters ---------- cid The path to the IPFS object(s) to list links from Returns ------- dict Directory information and contents """ args = (str(cid),) return self._client.request('/ls', args, decoder='json', **kwargs) PK�����P|Gg ��g �����ipfshttpclient/client/key.pyfrom . import base class Section(base.SectionBase): #TODO: Add `export(name, password)` @base.returns_single_item(base.ResponseBase) def gen(self, key_name: str, type: str, size: int = 2048, **kwargs: base.CommonArgs): """Adds a new public key that can be used for :meth:`~ipfshttpclient.Client.name.publish` .. code-block:: python >>> client.key.gen('example_key_name') {'Name': 'example_key_name', 'Id': 'QmQLaT5ZrCfSkXTH6rUKtVidcxj8jrW3X2h75Lug1AV7g8'} Parameters ---------- key_name Name of the new Key to be generated. Used to reference the Keys. type Type of key to generate. The current possible keys types are: * ``"rsa"`` * ``"ed25519"`` size Bitsize of key to generate Returns ------- dict +------+---------------------------------------------------+ | Name | The name of the newly generated key | +------+---------------------------------------------------+ | Id | The key ID/fingerprint of the newly generated key | +------+---------------------------------------------------+ """ opts = {"type": type, "size": size} kwargs.setdefault("opts", {}).update(opts) args = (key_name,) return self._client.request('/key/gen', args, decoder='json', **kwargs) #TODO: Add `import(name, pam, password)` @base.returns_single_item(base.ResponseBase) def list(self, **kwargs: base.CommonArgs): """Returns a list of all available IPNS keys .. code-block:: python >>> client.key.list() {'Keys': [ {'Name': 'self', 'Id': 'QmQf22bZar3WKmojipms22PkXH1MZGmvsqzQtuSvQE3uhm'}, {'Name': 'example_key_name', 'Id': 'QmQLaT5ZrCfSkXTH6rUKtVidcxj8jrW3X2h75Lug1AV7g8'} ]} Returns ------- dict +------+--------------------------------------------------------+ | Keys | List of dictionaries with Names and Ids of public keys | +------+--------------------------------------------------------+ """ return self._client.request('/key/list', decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def rename(self, key_name: str, new_key_name: str, **kwargs: base.CommonArgs): """Rename an existing key .. code-block:: python >>> client.key.rename("bla", "personal") {"Was": "bla", "Now": "personal", "Id": "QmeyrRNxXaasZaoDXcCZgryoBCga9shaHQ4suHAYXbNZF3", "Overwrite": False} Parameters ---------- key_name Current name of the key to rename new_key_name New name of the key Returns ------- dict Information about the key renameal """ args = (key_name, new_key_name) return self._client.request( '/key/rename', args, decoder='json', **kwargs ) @base.returns_single_item(base.ResponseBase) def rm(self, key_name: str, *key_names: str, **kwargs: base.CommonArgs): """Removes one or more keys .. code-block:: python >>> client.key.rm("bla") {"Keys": [ {"Name": "bla", "Id": "QmfJpR6paB6h891y7SYXGe6gapyNgepBeAYMbyejWA4FWA"} ]} Parameters ---------- key_name Name of the key(s) to remove. 
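Several keys may be removed in a single call (illustrative sketch – the key
names reuse those from the examples above):

.. code-block:: python

    >>> client.key.rm("example_key_name", "bla")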
Returns ------- dict +------+--------------------------------------------------+ | Keys | List of key names and IDs that have been removed | +------+--------------------------------------------------+ """ args = (key_name,) + key_names return self._client.request('/key/rm', args, decoder='json', **kwargs) PK�����Pi9V��V��&���ipfshttpclient/client/miscellaneous.pyimport typing as ty from . import base from .. import exceptions class Base(base.ClientBase): @base.returns_single_item(base.ResponseBase) def dns(self, domain_name: str, recursive: bool = False, **kwargs: base.CommonArgs): """Resolves DNS links to their referenced dweb-path CIDs are hard to remember, but domain names are usually easy to remember. To create memorable aliases for CIDs, DNS TXT records can point to other DNS links, IPFS objects, IPNS keys, etc. This command resolves those links to the referenced object. For example, with this DNS TXT record:: >>> import dns.resolver >>> a = dns.resolver.query("ipfs.io", "TXT") >>> a.response.answer[0].items[0].to_text() '"dnslink=/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n"' The resolver will give:: >>> client.dns("ipfs.io") {'Path': '/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n'} Parameters ---------- domain_name The domain-name name to resolve recursive Resolve until the name is not a DNS link Returns ------- dict +------+-------------------------------------+ | Path | Resource were a DNS entry points to | +------+-------------------------------------+ """ kwargs.setdefault("opts", {})["recursive"] = recursive args = (domain_name,) return self._client.request('/dns', args, decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def id(self, peer: ty.Optional[str] = None, **kwargs: base.CommonArgs): """Returns general information of an IPFS Node Returns the PublicKey, ProtocolVersion, ID, AgentVersion and Addresses of the connected daemon or some other node. .. code-block:: python >>> client.id() {'ID': 'QmVgNoP89mzpgEAAqK8owYoDEyB97MkcGvoWZir8otE9Uc', 'PublicKey': 'CAASpgIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggE … BAAE=', 'AgentVersion': 'go-libp2p/3.3.4', 'ProtocolVersion': 'ipfs/0.1.0', 'Addresses': [ '/ip4/127.0.0.1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owYo … E9Uc', '/ip4/10.1.0.172/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owY … E9Uc', '/ip4/172.18.0.1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owY … E9Uc', '/ip6/::1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owYoDEyB97 … E9Uc', '/ip6/fccc:7904:b05b:a579:957b:deef:f066:cad9/tcp/400 … E9Uc', '/ip6/fd56:1966:efd8::212/tcp/4001/ipfs/QmVgNoP89mzpg … E9Uc', '/ip6/fd56:1966:efd8:0:def1:34d0:773:48f/tcp/4001/ipf … E9Uc', '/ip6/2001:db8:1::1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8 … E9Uc', '/ip4/77.116.233.54/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8 … E9Uc', '/ip4/77.116.233.54/tcp/10842/ipfs/QmVgNoP89mzpgEAAqK … E9Uc']} Parameters ---------- peer Peer.ID of the node to look up (local node if ``None``) Returns ------- dict Information about the IPFS node """ args = (peer,) if peer is not None else () return self._client.request('/id', args, decoder='json', **kwargs) #TODO: isOnline() @base.returns_multiple_items(base.ResponseBase) def ping(self, peer: str, *peers: str, count: int = 10, **kwargs: base.CommonArgs): """Provides round-trip latency information for the routing system. Finds nodes via the routing system, sends pings, waits for pongs, and prints out round-trip latency information. .. 
code-block:: python >>> client.ping("QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n") [{'Success': True, 'Time': 0, 'Text': 'Looking up peer QmTzQ1JRkWErjk39mryYw2WVaphAZN … c15n'}, {'Success': False, 'Time': 0, 'Text': 'Peer lookup error: routing: not found'}] .. hint:: Pass ``stream=True`` to receive ping progress reports as they arrive. Parameters ---------- peer ID of peer(s) to be pinged count Number of ping messages to send Returns ------- list Progress reports from the ping """ kwargs.setdefault("opts", {})["count"] = count args = (peer,) + peers return self._client.request('/ping', args, decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def resolve(self, path: str, recursive: bool = False, **kwargs: base.CommonArgs): """Resolves an dweb-path and return the path of the referenced item There are a number of mutable name protocols that can link among themselves and into IPNS. For example IPNS references can (currently) point at an IPFS object, and DNS links can point at other DNS links, IPNS entries, or IPFS objects. This command accepts any of these identifiers. .. code-block:: python >>> client.resolve("/ipfs/QmTkzDwWqPbnAh5YiV5VwcTLnGdw … ca7D/Makefile") {'Path': '/ipfs/Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV'} >>> client.resolve("/ipns/ipfs.io") {'Path': '/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n'} Parameters ---------- path The name to resolve recursive Resolve until the result is an IPFS name Returns ------- dict +------+-------------------------------------+ | Path | IPFS path of the requested resource | +------+-------------------------------------+ """ kwargs.setdefault("opts", {})["recursive"] = recursive args = (path,) return self._client.request('/resolve', args, decoder='json', **kwargs) @base.returns_no_item def stop(self): """Stops the connected IPFS daemon instance Sending any further requests after this will fail with :class:`~ipfshttpclient.exceptions.ConnectionError`, unless you start another IPFS daemon instance at the same address. """ try: self._client.request('/shutdown') except exceptions.ConnectionError: # Sometimes the daemon kills the connection before sending a # response causing an incorrect `ConnectionError` to bubble pass @base.returns_single_item(base.ResponseBase) def version(self, **kwargs: base.CommonArgs): """Returns the software versions of the currently connected node .. code-block:: python >>> client.version() {'Version': '0.4.3-rc2', 'Repo': '4', 'Commit': '', 'System': 'amd64/linux', 'Golang': 'go1.6.2'} Returns ------- dict Daemon and system version information """ return self._client.request('/version', decoder='json', **kwargs)PK�����PB0�������ipfshttpclient/client/name.pyimport typing as ty from . import base class Section(base.SectionBase): @base.returns_single_item(base.ResponseBase) def publish(self, ipfs_path: str, resolve: bool = True, lifetime: ty.Union[str, int] = "24h", ttl: ty.Union[str, int] = None, key: str = None, allow_offline: bool = False, **kwargs: base.CommonArgs): """Publishes an object to IPNS IPNS is a PKI namespace, where names are the hashes of public keys, and the private key enables publishing new (signed) values. In publish, the default value of *name* is your own identity public key. .. 
code-block:: python >>> client.name.publish('/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZK … GZ5d') {'Value': '/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d', 'Name': 'QmVgNoP89mzpgEAAqK8owYoDEyB97MkcGvoWZir8otE9Uc'} Parameters ---------- ipfs_path IPFS path of the object to be published allow_offline When offline, save the IPNS record to the the local datastore without broadcasting to the network instead of simply failing. lifetime Time duration that the record will be valid for Accepts durations such as ``"300s"``, ``"1.5h"`` or ``"2h45m"``. Valid units are: * ``"ns"`` * ``"us"`` (or ``"µs"``) * ``"ms"`` * ``"s"`` * ``"m"`` * ``"h"`` resolve Resolve given path before publishing ttl Time duration this record should be cached for. Same syntax like 'lifetime' option. (experimental feature) key Name of the key to be used, as listed by 'ipfs key list'. Returns ------- dict +-------+----------------------------------------------------------+ | Name | Key ID of the key to which the given value was published | +-------+----------------------------------------------------------+ | Value | Value that was published | +-------+----------------------------------------------------------+ """ opts = {"lifetime": str(lifetime), "resolve": resolve, "allow-offline": allow_offline} if ttl: opts["ttl"] = str(ttl) if key: opts["key"] = key kwargs.setdefault("opts", {}).update(opts) args = (ipfs_path,) return self._client.request('/name/publish', args, decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def resolve(self, name: str = None, recursive: bool = False, nocache: bool = False, dht_record_count: ty.Optional[int] = None, dht_timeout: ty.Optional[ty.Union[str, int]] = None, **kwargs: base.CommonArgs): """Retrieves the value currently published at the given IPNS name IPNS is a PKI namespace, where names are the hashes of public keys, and the private key enables publishing new (signed) values. In resolve, the default value of ``name`` is your own identity public key. .. code-block:: python >>> client.name.resolve() {'Path': '/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d'} Parameters ---------- name The IPNS name to resolve (defaults to the connected node) recursive Resolve until the result is not an IPFS name (default: false) nocache Do not use cached entries (default: false) dht_record_count Number of records to request for DHT resolution. dht_timeout Maximum time to collect values during DHT resolution, e.g. "30s". For the exact syntax see the ``lifetime`` argument on :meth:`~ipfshttpclient.Client.name.publish`. Set this parameter to ``0`` to disable the timeout. Returns ------- dict +------+--------------------------------------+ | Path | The resolved value of the given name | +------+--------------------------------------+ """ opts = {"recursive": recursive, "nocache": nocache} if dht_record_count is not None: opts["dht-record-count"] = str(dht_record_count) if dht_timeout is not None: opts["dht-timeout"] = str(dht_timeout) kwargs.setdefault("opts", {}).update(opts) args = (name,) if name is not None else () return self._client.request('/name/resolve', args, decoder='json', **kwargs)PK�����Pj!=+��+�����ipfshttpclient/client/object.pyimport typing as ty from . import base from .. import multipart from .. 
import utils class PatchSection(base.SectionBase): @base.returns_single_item(base.ResponseBase) def add_link(self, root: base.cid_t, name: str, ref: base.cid_t, create: bool = False, **kwargs: base.CommonArgs): """Creates a new merkledag object based on an existing one The new object will have an additional link to the given CID. .. code-block:: python >>> client.object.patch.add_link( ... 'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2', ... 'Johnny', ... 'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2' ... ) {'Hash': 'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k'} Parameters ---------- root IPFS hash for the object being modified name name for the new link ref IPFS hash for the object being linked to create Create intermediary nodes Returns ------- dict +------+----------------------------------+ | Hash | Hash of the newly derived object | +------+----------------------------------+ """ kwargs.setdefault("opts", {})["create"] = create args = (str(root), name, str(ref),) return self._client.request('/object/patch/add-link', args, decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def append_data(self, cid: base.cid_t, new_data: utils.clean_file_t, **kwargs: base.CommonArgs): """Creates a new merkledag object based on an existing one The new object will have the same links as the previous object, but with the provided data appended to it. .. code-block:: python >>> client.object.patch.append_data("QmZZmY … fTqm", io.BytesIO(b"bla")) {'Hash': 'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2'} Parameters ---------- cid The hash of an ipfs object to modify new_data The data to append to the object's data section Returns ------- dict +------+----------------------------------+ | Hash | Hash of the newly derived object | +------+----------------------------------+ """ args = (str(cid),) body, headers = multipart.stream_files(new_data, chunk_size=self.chunk_size) return self._client.request('/object/patch/append-data', args, decoder='json', data=body, headers=headers, **kwargs) @base.returns_single_item(base.ResponseBase) def rm_link(self, root: base.cid_t, link: str, **kwargs: base.CommonArgs): """Creates a new merkledag object based on an existing one The new object will lack a link to the specified object, but otherwise be unchanged. .. code-block:: python >>> client.object.patch.rm_link( ... 'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k', ... 'Johnny' ... ) {'Hash': 'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2'} Parameters ---------- root IPFS hash of the object to modify link name of the link to remove Returns ------- dict +------+----------------------------------+ | Hash | Hash of the newly derived object | +------+----------------------------------+ """ args = (str(root), link) return self._client.request('/object/patch/rm-link', args, decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def set_data(self, root: base.cid_t, data: utils.clean_file_t, **kwargs: base.CommonArgs): """Creates a new merkledag object based on an existing one The new object will have the same links as the old object but with the provided data instead of the old object's data contents. .. code-block:: python >>> client.object.patch.set_data( ... 'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k', ... io.BytesIO(b'bla') ... 
) {'Hash': 'QmSw3k2qkv4ZPsbu9DVEJaTMszAQWNgM1FTFYpfZeNQWrd'} Parameters ---------- root IPFS hash of the object to modify data The new data to store in root Returns ------- dict +------+----------------------------------+ | Hash | Hash of the newly derived object | +------+----------------------------------+ """ args = (str(root),) body, headers = multipart.stream_files(data, chunk_size=self.chunk_size) return self._client.request('/object/patch/set-data', args, decoder='json', data=body, headers=headers, **kwargs) class Section(base.SectionBase): patch = base.SectionProperty(PatchSection) def data(self, cid: base.cid_t, **kwargs: base.CommonArgs): r"""Returns the raw bytes in an IPFS object .. code-block:: python >>> client.object.data('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') b'\x08\x01' Parameters ---------- cid Key of the object to retrieve, in CID format Returns ------- bytes Raw object data """ args = (str(cid),) return self._client.request('/object/data', args, **kwargs) @base.returns_single_item(base.ResponseBase) def get(self, cid: base.cid_t, **kwargs: base.CommonArgs): """Get and serialize the DAG node named by CID. .. code-block:: python >>> client.object.get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'Data': '\x08\x01', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174}, {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', 'Name': 'example', 'Size': 1474}, {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', 'Name': 'home', 'Size': 3947}, {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', 'Name': 'lib', 'Size': 268261}, {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55} ]} Parameters ---------- cid Key of the object to retrieve, in CID format Returns ------- dict +-------+------------------------------------------------+ | Data | Raw object data (ISO-8859-1 decoded) | +-------+------------------------------------------------+ | Links | List of links associated with the given object | +-------+------------------------------------------------+ """ args = (str(cid),) return self._client.request('/object/get', args, decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def links(self, cid: base.cid_t, **kwargs: base.CommonArgs): """Returns the links pointed to by the specified object .. 
code-block:: python >>> client.object.links('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDx … ca7D') {'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'Links': [ {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV', 'Name': 'Makefile', 'Size': 174}, {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX', 'Name': 'example', 'Size': 1474}, {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK', 'Name': 'home', 'Size': 3947}, {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso', 'Name': 'lib', 'Size': 268261}, {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY', 'Name': 'published-version', 'Size': 55}]} Parameters ---------- cid Key of the object to retrieve, in CID format Returns ------- dict +-------+------------------------------------------------+ | Hash | The requested object CID | +-------+------------------------------------------------+ | Links | List of links associated with the given object | +-------+------------------------------------------------+ """ args = (str(cid),) return self._client.request('/object/links', args, decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def new(self, template: ty.Optional[str] = None, **kwargs: base.CommonArgs): """Creates a new object from an IPFS template By default this creates and returns a new empty merkledag node, but you may pass an optional template argument to create a preformatted node. .. code-block:: python >>> client.object.new() {'Hash': 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n'} Parameters ---------- template Blueprints from which to construct the new object. Possible values: * ``"unixfs-dir"`` * ``None`` Returns ------- dict +-------+----------------------------------------+ | Hash | The hash of the requested empty object | +-------+----------------------------------------+ """ args = (template,) if template is not None else () return self._client.request('/object/new', args, decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def put(self, file: utils.clean_file_t, **kwargs: base.CommonArgs): """Stores input as a DAG object and returns its key. .. code-block:: python >>> client.object.put(io.BytesIO(b''' ... { ... "Data": "another", ... "Links": [ { ... "Name": "some link", ... "Hash": "QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCV … R39V", ... "Size": 8 ... } ] ... }''')) {'Hash': 'QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm', 'Links': [ {'Hash': 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V', 'Size': 8, 'Name': 'some link'} ] } Parameters ---------- file (JSON) object from which the DAG object will be created Returns ------- dict Hash and links of the created DAG object See the :meth:`~ipfshttpclient.Client.object.links` method for details. """ body, headers = multipart.stream_files(file, chunk_size=self.chunk_size) return self._client.request('/object/put', decoder='json', data=body, headers=headers, **kwargs) @base.returns_single_item(base.ResponseBase) def stat(self, cid: base.cid_t, **kwargs: base.CommonArgs): """Get stats for the DAG node named by cid. .. 
code-block:: python >>> client.object.stat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') {'LinksSize': 256, 'NumLinks': 5, 'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D', 'BlockSize': 258, 'CumulativeSize': 274169, 'DataSize': 2} Parameters ---------- cid Key of the object to retrieve, in CID format Returns ------- dict """ args = (str(cid),) return self._client.request('/object/stat', args, decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def diff(self, a: base.cid_t, b: base.cid_t, **kwargs: base.CommonArgs): """Diff two cids. .. code-block:: python >>> client.object.diff( 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n', 'QmV4QR7MCBj5VTi6ddHmXPyjWGzbaKEtX2mx7axA5PA13G' ) {'Changes': [{ 'Type': 2, 'Path': '', 'Before': {'/': 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n'}, 'After': {'/': 'QmV4QR7MCBj5VTi6ddHmXPyjWGzbaKEtX2mx7axA5PA13G'}}]} Parameters ---------- a Key of object a for comparison b Key of object b for comparison Returns ------- dict """ args = (str(a), str(b)) return self._client.request('/object/diff', args, decoder='json', **kwargs)PK�����P\pW!��!�����ipfshttpclient/client/pin.pyfrom . import base class Section(base.SectionBase): @base.returns_single_item(base.ResponseBase) def add(self, path: base.cid_t, *paths: base.cid_t, recursive: bool = True, **kwargs: base.CommonArgs): """Pins objects to the node's local repository Stores an IPFS object(s) from a given path in the local repository. .. code-block:: python >>> client.pin.add("QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d") {'Pins': ['QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d']} Parameters ---------- path Path to object(s) to be pinned recursive Recursively unpin the object linked to by the specified object(s) Returns ------- dict +------+-----------------------------------------------------------+ | Pins | List of IPFS objects that have been pinned by this action | +------+-----------------------------------------------------------+ """ kwargs.setdefault("opts", {})["recursive"] = recursive args = (str(path), *(str(p) for p in paths)) return self._client.request('/pin/add', args, decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def ls(self, *paths: base.cid_t, type: str = "all", **kwargs: base.CommonArgs): """Lists objects pinned in the local repository By default, all pinned objects are returned, but the ``type`` flag or arguments can restrict that to a specific pin type or to some specific objects respectively. In particular the ``type="recursive"`` argument will only list objects added ``.pin.add(…)`` (or similar) and will greatly speed processing as obtaining this list does *not* require a complete repository metadata scan. .. 
.. code-block:: python >>> client.pin.ls() {'Keys': { 'QmNNPMA1eGUbKxeph6yqV8ZmRkdVat … YMuz': {'Type': 'recursive'}, 'QmNPZUCeSN5458Uwny8mXSWubjjr6J … kP5e': {'Type': 'recursive'}, 'QmNg5zWpRMxzRAVg7FTQ3tUxVbKj8E … gHPz': {'Type': 'indirect'}, … 'QmNiuVapnYCrLjxyweHeuk6Xdqfvts … wCCe': {'Type': 'indirect'} }} >>> # While the above works you should always try to use `type="recursive"` >>> # instead as it will greatly speed up processing and only lists >>> # explicit pins (added with `.pin.add(…)` or similar), rather than >>> # all objects that won't be removed as part of `.repo.gc()`: >>> client.pin.ls(type="recursive") {'Keys': { 'QmNNPMA1eGUbKxeph6yqV8ZmRkdVat … YMuz': {'Type': 'recursive'}, 'QmNPZUCeSN5458Uwny8mXSWubjjr6J … kP5e': {'Type': 'recursive'}, … }} >>> client.pin.ls('/ipfs/QmNNPMA1eGUbKxeph6yqV8ZmRkdVat … YMuz') {'Keys': { 'QmNNPMA1eGUbKxeph6yqV8ZmRkdVat … YMuz': {'Type': 'recursive'}}} >>> client.pin.ls('/ipfs/QmdBCSn4UJP82MjhRVwpABww48tXL3 … mA6z') ipfshttpclient.exceptions.ErrorResponse: path '/ipfs/QmdBCSn4UJP82MjhRVwpABww48tXL3 … mA6z' is not pinned Parameters ---------- paths The IPFS paths or CIDs to search for If none are passed, return information about all pinned objects. If any of the passed CIDs is not pinned, then remote will return an error and an :exc:`ErrorResponse` exception will be raised. type The type of pinned keys to list. Can be: * ``"direct"`` * ``"indirect"`` * ``"recursive"`` * ``"all"`` Raises ------ ~ipfshttpclient.exceptions.ErrorResponse Remote returned an error. Remote will return an error if any of the passed CIDs is not pinned. In this case, the exception will contain 'not pinned' in its args[0]. Returns ------- dict +------+--------------------------------------------------------------+ | Keys | Mapping of IPFS object names currently pinned to their types | +------+--------------------------------------------------------------+ """ kwargs.setdefault("opts", {})["type"] = type args = tuple(str(p) for p in paths) return self._client.request('/pin/ls', args, decoder='json', **kwargs)
@base.returns_single_item(base.ResponseBase) def rm(self, path: base.cid_t, *paths: base.cid_t, recursive: bool = True, **kwargs: base.CommonArgs): """Removes a pinned object from local storage Removes the pin from the given object, allowing it to be garbage collected if needed. That is, depending on the node configuration it may not be garbage collected anytime soon or at all unless you manually clean up the local repository using :meth:`~ipfshttpclient.repo.gc`. Also note that if an object is pinned both directly (that is its type is ``"recursive"``) and indirectly (meaning that it is referenced by another object that is still pinned), it may not be removed at all after this.
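As a hedged sketch of that last point (the CID is reused from the listing example above and the error text is a placeholder, not captured daemon output), trying to unpin an object that is only kept alive indirectly by another pin is rejected by the daemon:

.. code-block:: python

    >>> client.pin.rm('QmNg5zWpRMxzRAVg7FTQ3tUxVbKj8E … gHPz')
    ipfshttpclient.exceptions.ErrorResponse: … is pinned indirectly under …

..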
code-block:: python >>> client.pin.rm('QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d') {'Pins': ['QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d']} Parameters ---------- path Path to object(s) to be unpinned recursive Recursively unpin the object linked to by the specified object(s) Returns ------- dict +------+-------------------------------------------------------------+ | Pins | List of IPFS objects that have been unpinned by this action | +------+-------------------------------------------------------------+ """ kwargs.setdefault("opts", {})["recursive"] = recursive args = (str(path), *(str(p) for p in paths)) return self._client.request('/pin/rm', args, decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def update(self, from_path: base.cid_t, to_path: base.cid_t, *, unpin: bool = True, **kwargs: base.CommonArgs): """Replaces one pin with another Updates one pin to another, making sure that all objects in the new pin are local. Then removes the old pin. This is an optimized version of using first using :meth:`~ipfshttpclient.Client.pin.add` to add a new pin for an object and then using :meth:`~ipfshttpclient.Client.pin.rm` to remove the pin for the old object. .. code-block:: python >>> client.pin.update("QmXMqez83NU77ifmcPs5CkNRTMQksBLkyfBf4H5g1NZ52P", ... "QmUykHAi1aSjMzHw3KmBoJjqRUQYNkFXm8K1y7ZsJxpfPH") {"Pins": ["/ipfs/QmXMqez83NU77ifmcPs5CkNRTMQksBLkyfBf4H5g1NZ52P", "/ipfs/QmUykHAi1aSjMzHw3KmBoJjqRUQYNkFXm8K1y7ZsJxpfPH"]} Parameters ---------- from_path Path to the old object to_path Path to the new object to be pinned unpin Should the pin of the old object be removed? Returns ------- dict +------+-------------------------------------------------------------+ | Pins | List of IPFS objects that have been affected by this action | +------+-------------------------------------------------------------+ """ kwargs.setdefault("opts", {})["unpin"] = unpin args = (str(from_path), str(to_path)) return self._client.request('/pin/update', args, decoder='json', **kwargs) @base.returns_multiple_items(base.ResponseBase, stream=True) def verify(self, path: base.cid_t, *paths: base.cid_t, verbose: bool = False, **kwargs: base.CommonArgs): """Verifies that all recursive pins are completely available in the local repository Scan the repo for pinned object graphs and check their integrity. Issues will be reported back with a helpful human-readable error message to aid in error recovery. This is useful to help recover from datastore corruptions (such as when accidentally deleting files added using the filestore backend). This function returns an iterator has to be exhausted or closed using either a context manager (``with``-statement) or its ``.close()`` method. .. code-block:: python >>> with client.pin.verify("QmN…TTZ", verbose=True) as pin_verify_iter: ... for item in pin_verify_iter: ... print(item) ... {"Cid":"QmVkNdzCBukBRdpyFiKPyL2R15qPExMr9rV9RFV2kf9eeV","Ok":True} {"Cid":"QmbPzQruAEFjUU3gQfupns6b8USr8VrD9H71GrqGDXQSxm","Ok":True} {"Cid":"Qmcns1nUvbeWiecdGDPw8JxWeUfxCV8JKhTfgzs3F8JM4P","Ok":True} … Parameters ---------- path Path to object(s) to be checked verbose Also report status of items that were OK? 
Returns ------- Iterable[dict] +-----+----------------------------------------------------+ | Cid | IPFS object ID checked | +-----+----------------------------------------------------+ | Ok | Whether the given object was successfully verified | +-----+----------------------------------------------------+ """ kwargs.setdefault("opts", {})["verbose"] = verbose args = (str(path), *(str(p) for p in paths)) return self._client.request('/pin/verify', args, decoder='json', stream=True, **kwargs)PK�����PVTµ�������ipfshttpclient/client/pubsub.pyimport typing as ty from . import base class SubChannel: """Wrapper for a pubsub subscription object that allows for easy closing of subscriptions. """ def __init__(self, sub): self.__sub = sub # type: str def read_message(self): return next(self.__sub) def __iter__(self): return self.__sub def close(self): self.__sub.close() def __enter__(self): return self def __exit__(self, *a): self.close() class Section(base.SectionBase): @base.returns_single_item(base.ResponseBase) def ls(self, **kwargs: base.CommonArgs): """Lists subscribed topics by name This method returns data that contains a list of all topics the user is subscribed to. In order to subscribe to a topic ``pubsub.sub`` must be called. .. code-block:: python # subscribe to a channel >>> with client.pubsub.sub("hello") as sub: ... client.pubsub.ls() { 'Strings' : ["hello"] } Returns ------- dict +---------+-------------------------------------------------+ | Strings | List of topic the IPFS daemon is subscribbed to | +---------+-------------------------------------------------+ """ return self._client.request('/pubsub/ls', decoder='json', **kwargs) @base.returns_single_item(base.ResponseBase) def peers(self, topic: ty.Optional[str] = None, **kwargs: base.CommonArgs): """Lists the peers we are pubsubbing with Lists the IDs of other IPFS users who we are connected to via some topic. Without specifying a topic, IPFS peers from all subscribed topics will be returned in the data. If a topic is specified only the IPFS id's of the peers from the specified topic will be returned in the data. .. code-block:: python >>> client.pubsub.peers() {'Strings': [ 'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8', 'QmQKiXYzoFpiGZ93DaFBFDMDWDJCRjXDARu4wne2PRtSgA', ... 'QmepgFW7BHEtU4pZJdxaNiv75mKLLRQnPi1KaaXmQN4V1a' ] } ## with a topic # subscribe to a channel >>> with client.pubsub.sub('hello') as sub: ... client.pubsub.peers(topic='hello') {'String': [ 'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8', ... # other peers connected to the same channel ] } Parameters ---------- topic The topic to list connected peers of (defaults to None which lists peers for all topics) Returns ------- dict +---------+-------------------------------------------------+ | Strings | List of PeerIDs of peers we are pubsubbing with | +---------+-------------------------------------------------+ """ args = (topic,) if topic is not None else () return self._client.request('/pubsub/peers', args, decoder='json', **kwargs) @base.returns_no_item def publish(self, topic: str, payload: str, **kwargs: base.CommonArgs): """Publish a message to a given pubsub topic Publishing will publish the given payload (string) to everyone currently subscribed to the given topic. All data (including the ID of the publisher) is automatically base64 encoded when published. .. 
code-block:: python # publishes the message 'message' to the topic 'hello' >>> client.pubsub.publish('hello', 'message') [] Parameters ---------- topic Topic to publish to payload Data to be published to the given topic Returns ------- list An empty list """ args = (topic, payload) return self._client.request('/pubsub/pub', args, decoder='json', **kwargs)
def subscribe(self, topic: str, discover: bool = False, **kwargs: base.CommonArgs): """Subscribes to messages on a given topic Subscribing to a topic in IPFS means anytime a message is published to a topic, the subscribers will be notified of the publication. The connection with the pubsub topic is opened and read. The Subscription returned should be used inside a context manager to ensure that it is closed properly and not left hanging. .. code-block:: python >>> sub = client.pubsub.subscribe('testing') >>> with client.pubsub.subscribe('testing') as sub: ... # publish a message 'hello' to the topic 'testing' ... client.pubsub.publish('testing', 'hello') ... for message in sub: ... print(message) ... # Stop reading the subscription after ... # we receive one publication ... break {'from': '<base64encoded IPFS id>', 'data': 'aGVsbG8=', 'topicIDs': ['testing']} # NOTE: in order to receive published data # you must already be subscribed to the topic at publication # time. Parameters ---------- topic Name of a topic to subscribe to discover Try to discover other peers subscribed to the same topic (defaults to False) Returns ------- :class:`SubChannel` Generator wrapped in a context manager that maintains a connection stream to the given topic. """ args = (topic, discover) return SubChannel(self._client.request('/pubsub/sub', args, stream=True, decoder='json'))

ipfshttpclient/client/repo.py

import warnings from . import base class Section(base.SectionBase):
@base.returns_multiple_items(base.ResponseBase) def gc(self, *, quiet: bool = False, return_result: bool = True, **kwargs: base.CommonArgs): """Removes stored objects that are not pinned from the repo .. code-block:: python >>> client.repo.gc() [{'Key': 'QmNPXDC6wTXVmZ9Uoc8X1oqxRRJr4f1sDuyQuwaHG2mpW2'}, {'Key': 'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k'}, {'Key': 'QmRVBnxUCsD57ic5FksKYadtyUbMsyo9KYQKKELajqAp4q'}, … {'Key': 'QmYp4TeCurXrhsxnzt5wqLqqUz8ZRg5zsc7GuUrUSDtwzP'}] Performs a garbage collection sweep of the local set of stored objects and removes ones that are not pinned in order to reclaim hard disk space. Returns the hashes of all collected objects. Parameters ---------- quiet Should the client avoid downloading the list of removed objects? Passing ``True`` to this parameter often causes the GC process to speed up tremendously, as it will also avoid generating the list of removed objects in the connected daemon at all. return_result If ``False`` this is a legacy alias for ``quiet=True``. (Will be dropped in py-ipfs-api-client 0.7.x!)
Returns ------- dict List of IPFS objects that have been removed """ if not return_result: warnings.warn("Parameter `return_result` of `.repo.gc(…)` is deprecated " "in favour of the newer `quiet` parameter", DeprecationWarning) quiet = quiet or not return_result if "use_http_head_for_no_result" not in self._client.workarounds: # go-ipfs 0.4.22- does not support the quiet option yet kwargs.setdefault("opts", {})["quiet"] = quiet kwargs.setdefault("return_result", not quiet) return self._client.request('/repo/gc', decoder='json', **kwargs)
@base.returns_single_item(base.ResponseBase) def stat(self, **kwargs: base.CommonArgs): """Returns local repository status information .. code-block:: python >>> client.repo.stat() {'NumObjects': 354, 'RepoPath': '…/.local/share/ipfs', 'Version': 'fs-repo@4', 'RepoSize': 13789310} Returns ------- dict General information about the IPFS file repository +------------+-------------------------------------------------+ | NumObjects | Number of objects in the local repo. | +------------+-------------------------------------------------+ | RepoPath | The path to the repo being currently used. | +------------+-------------------------------------------------+ | RepoSize | Size in bytes that the repo is currently using. | +------------+-------------------------------------------------+ | Version | The repo version. | +------------+-------------------------------------------------+ """ return self._client.request('/repo/stat', decoder='json', **kwargs) #TODO: `version()`

ipfshttpclient/client/swarm.py

from . import base class FiltersSection(base.SectionBase):
@base.returns_single_item(base.ResponseBase) def add(self, address: base.multiaddr_t, *addresses: base.multiaddr_t, **kwargs: base.CommonArgs): """Adds a given multiaddr filter to the filter/ignore list This will add an address filter to the daemon's swarm. Filters applied this way will not persist across daemon reboots; to achieve that, add your filters to the configuration file. .. code-block:: python >>> client.swarm.filters.add("/ip4/192.168.0.0/ipcidr/16") {'Strings': ['/ip4/192.168.0.0/ipcidr/16']} Parameters ---------- address Multiaddr to avoid connecting to Returns ------- dict +---------+-----------------------------+ | Strings | List of swarm filters added | +---------+-----------------------------+ """ args = (str(address), *(str(a) for a in addresses)) return self._client.request('/swarm/filters/add', args, decoder='json', **kwargs)
@base.returns_single_item(base.ResponseBase) def rm(self, address: base.multiaddr_t, *addresses: base.multiaddr_t, **kwargs: base.CommonArgs): """Removes a given multiaddr filter from the filter list This will remove an address filter from the daemon's swarm. Filters removed this way will not persist across daemon reboots; to achieve that, remove your filters from the configuration file.
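A hedged sketch of the configuration-file route mentioned above (the ``client.config.set`` call and the exact ``Swarm.AddrFilters`` semantics are assumptions here, not part of this module): writing the filter into the daemon configuration makes it survive restarts, whereas the runtime-only filter list does not.

.. code-block:: python

    >>> # Hypothetical: persist the filter via the daemon config instead of
    >>> # (or in addition to) the runtime-only filter list
    >>> client.config.set("Swarm.AddrFilters", ["/ip4/192.168.0.0/ipcidr/16"])
    >>> # The runtime filter can then be dropped without losing the rule
    >>> client.swarm.filters.rm("/ip4/192.168.0.0/ipcidr/16")
    {'Strings': ['/ip4/192.168.0.0/ipcidr/16']}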
.. code-block:: python >>> client.swarm.filters.rm("/ip4/192.168.0.0/ipcidr/16") {'Strings': ['/ip4/192.168.0.0/ipcidr/16']} Parameters ---------- address Multiaddr filter to remove Returns ------- dict +---------+-------------------------------+ | Strings | List of swarm filters removed | +---------+-------------------------------+ """ args = (str(address), *(str(a) for a in addresses)) return self._client.request('/swarm/filters/rm', args, decoder='json', **kwargs)
class Section(base.SectionBase): filters = base.SectionProperty(FiltersSection)
@base.returns_single_item(base.ResponseBase) def addrs(self, **kwargs: base.CommonArgs): """Returns the addresses of currently connected peers by peer id .. code-block:: python >>> pprint(client.swarm.addrs()) {'Addrs': { 'QmNMVHJTSZHTWMWBbmBrQgkA1hZPWYuVJx2DpSGESWW6Kn': [ '/ip4/10.1.0.1/tcp/4001', '/ip4/127.0.0.1/tcp/4001', '/ip4/51.254.25.16/tcp/4001', '/ip6/2001:41d0:b:587:3cae:6eff:fe40:94d8/tcp/4001', '/ip6/2001:470:7812:1045::1/tcp/4001', '/ip6/::1/tcp/4001', '/ip6/fc02:2735:e595:bb70:8ffc:5293:8af8:c4b7/tcp/4001', '/ip6/fd00:7374:6172:100::1/tcp/4001', '/ip6/fd20:f8be:a41:0:c495:aff:fe7e:44ee/tcp/4001', '/ip6/fd20:f8be:a41::953/tcp/4001'], 'QmNQsK1Tnhe2Uh2t9s49MJjrz7wgPHj4VyrZzjRe8dj7KQ': [ '/ip4/10.16.0.5/tcp/4001', '/ip4/127.0.0.1/tcp/4001', '/ip4/172.17.0.1/tcp/4001', '/ip4/178.62.107.36/tcp/4001', '/ip6/::1/tcp/4001'], … }} Returns ------- dict Multiaddrs of peers by peer id +-------+--------------------------------------------------------------+ | Addrs | Mapping of PeerIDs to a list of their advertised multiaddrs | +-------+--------------------------------------------------------------+ """ return self._client.request('/swarm/addrs', decoder='json', **kwargs)
@base.returns_single_item(base.ResponseBase) def connect(self, address: base.multiaddr_t, *addresses: base.multiaddr_t, **kwargs: base.CommonArgs): """Attempts to connect to a peer at the given multiaddr This will open a new direct connection to a peer address. The address format is an IPFS multiaddr, e.g.:: /ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ .. code-block:: python >>> client.swarm.connect("/ip4/104.131.131.82/tcp/4001/ipfs/Qma … uvuJ") {'Strings': ['connect QmaCpDMGvV2BGHeYERUEnRQAwe3 … uvuJ success']} Parameters ---------- address Address of peer to connect to Returns ------- dict Textual connection status report """ args = (str(address), *(str(a) for a in addresses)) return self._client.request('/swarm/connect', args, decoder='json', **kwargs)
@base.returns_single_item(base.ResponseBase) def disconnect(self, address: base.multiaddr_t, *addresses: base.multiaddr_t, **kwargs: base.CommonArgs): """Closes any open connection to a given multiaddr This will close a connection to a peer address. The address format is an IPFS multiaddr:: /ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ The disconnect is not permanent; if IPFS needs to talk to that address later, it will reconnect. To avoid this, add a filter for the given address before disconnecting.
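A hedged sketch of that combination (the ``/ipcidr/32`` single-host filter is an assumption about the filter format, and the return values are placeholders): blocking the address first keeps the daemon from simply re-dialling the peer after the disconnect.

.. code-block:: python

    >>> client.swarm.filters.add("/ip4/104.131.131.82/ipcidr/32")
    {'Strings': ['/ip4/104.131.131.82/ipcidr/32']}
    >>> client.swarm.disconnect("/ip4/104.131.131.82/tcp/4001/ipfs/Qm … uJ")
    {'Strings': ['disconnect QmaCpDMGvV2BGHeYERUEnRQA … uvuJ success']}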
.. code-block:: python >>> client.swarm.disconnect("/ip4/104.131.131.82/tcp/4001/ipfs/Qm … uJ") {'Strings': ['disconnect QmaCpDMGvV2BGHeYERUEnRQA … uvuJ success']} Parameters ---------- address Address of peer to disconnect from Returns ------- dict Textual connection status report """ args = (str(address), *(str(a) for a in addresses)) return self._client.request('/swarm/disconnect', args, decoder='json', **kwargs)
@base.returns_single_item(base.ResponseBase) def peers(self, **kwargs: base.CommonArgs): """Returns the addresses & IDs of currently connected peers .. code-block:: python >>> client.swarm.peers() {'Strings': [ '/ip4/101.201.40.124/tcp/40001/ipfs/QmZDYAhmMDtnoC6XZ … kPZc', '/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYER … uvuJ', '/ip4/104.223.59.174/tcp/4001/ipfs/QmeWdgoZezpdHz1PX8 … 1jB6', … '/ip6/fce3: … :f140/tcp/43901/ipfs/QmSoLnSGccFuZQJzRa … ca9z' ]} Returns ------- dict +---------+----------------------------------------------------+ | Strings | List of Multiaddrs that the daemon is connected to | +---------+----------------------------------------------------+ """ return self._client.request('/swarm/peers', decoder='json', **kwargs)

ipfshttpclient/client/unstable.py

from . import base class LogSection(base.SectionBase):
@base.returns_single_item(base.ResponseBase) def level(self, subsystem: str, level: str, **kwargs: base.CommonArgs): r"""Changes the logging output level for a given subsystem **This API is subject to future change or removal!** .. code-block:: python >>> client.unstable.log.level("path", "info") {"Message": "Changed log level of 'path' to 'info'\n"} Parameters ---------- subsystem The subsystem logging identifier (Use ``"all"`` for all subsystems) level The desired logging level. Must be one of: * ``"debug"`` * ``"info"`` * ``"warning"`` * ``"error"`` * ``"fatal"`` * ``"panic"`` Returns ------- dict +--------+-----------------------+ | Status | Textual status report | +--------+-----------------------+ """ args = (subsystem, level) return self._client.request('/log/level', args, decoder='json', **kwargs)
@base.returns_single_item(base.ResponseBase) def ls(self, **kwargs: base.CommonArgs): """Lists the available logging subsystems **This API is subject to future change or removal!**
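The names returned by this call are the values accepted by :meth:`~ipfshttpclient.Client.unstable.log.level` documented above; as a brief illustration (the response text is a placeholder modelled on the ``log.level`` example, and ``"dht"`` is simply one of the subsystems listed in the output below):

.. code-block:: python

    >>> client.unstable.log.level("dht", "debug")
    {'Message': "Changed log level of 'dht' to 'debug'"}

..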
code-block:: python >>> client.unstable.log.ls() {'Strings': [ 'github.com/ipfs/go-libp2p/p2p/host', 'net/identify', 'merkledag', 'providers', 'routing/record', 'chunk', 'mfs', 'ipns-repub', 'flatfs', 'ping', 'mockrouter', 'dagio', 'cmds/files', 'blockset', 'engine', 'mocknet', 'config', 'commands/http', 'cmd/ipfs', 'command', 'conn', 'gc', 'peerstore', 'core', 'coreunix', 'fsrepo', 'core/server', 'boguskey', 'github.com/ipfs/go-libp2p/p2p/host/routed', 'diagnostics', 'namesys', 'fuse/ipfs', 'node', 'secio', 'core/commands', 'supernode', 'mdns', 'path', 'table', 'swarm2', 'peerqueue', 'mount', 'fuse/ipns', 'blockstore', 'github.com/ipfs/go-libp2p/p2p/host/basic', 'lock', 'nat', 'importer', 'corerepo', 'dht.pb', 'pin', 'bitswap_network', 'github.com/ipfs/go-libp2p/p2p/protocol/relay', 'peer', 'transport', 'dht', 'offlinerouting', 'tarfmt', 'eventlog', 'ipfsaddr', 'github.com/ipfs/go-libp2p/p2p/net/swarm/addr', 'bitswap', 'reprovider', 'supernode/proxy', 'crypto', 'tour', 'commands/cli', 'blockservice']} Returns ------- dict +---------+-----------------------------------+ | Strings | List of daemon logging subsystems | +---------+-----------------------------------+ """ return self._client.request('/log/ls', decoder='json', **kwargs) @base.returns_multiple_items(base.ResponseBase, stream=True) def tail(self, **kwargs: base.CommonArgs): r"""Streams log outputs as they are generated **This API is subject to future change or removal!** This function returns an iterator that needs to be closed using a context manager (``with``-statement) or using the ``.close()`` method. .. code-block:: python >>> with client.unstable.log.tail() as log_tail_iter: ... for item in log_tail_iter: ... print(item) ... {"event":"updatePeer","system":"dht", "peerID":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", "time":"2016-08-22T13:25:27.43353297Z"} {"event":"handleAddProviderBegin","system":"dht", "peer":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", "time":"2016-08-22T13:25:27.433642581Z"} {"event":"handleAddProvider","system":"dht","duration":91704, "key":"QmNT9Tejg6t57Vs8XM2TVJXCwevWiGsZh3kB4HQXUZRK1o", "peer":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", "time":"2016-08-22T13:25:27.433747513Z"} {"event":"updatePeer","system":"dht", "peerID":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", "time":"2016-08-22T13:25:27.435843012Z"} … Returns ------- Iterable[dict] """ return self._client.request('/log/tail', decoder='json', stream=True, **kwargs) class RefsSection(base.SectionBase): @base.returns_multiple_items(base.ResponseBase) def __call__(self, cid: base.cid_t, **kwargs: base.CommonArgs): """Returns the hashes of objects referenced by the given hash **This API is subject to future change or removal!** You likely want to use :meth:`~ipfshttpclient.object.links` instead. .. 
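To make the relationship with :meth:`~ipfshttpclient.Client.object.links` concrete (a hedged sketch; the output shown is taken from the links example earlier in this package and abridged): ``refs`` reports only the referenced hashes, while ``object.links`` also returns the link names and sizes for the same CID.

.. code-block:: python

    >>> client.object.links('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
    {'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D',
     'Links': [{'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV',
                'Name': 'Makefile', 'Size': 174}, …]}

..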
code-block:: python >>> client.unstable.refs('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') [{'Ref': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7 … cNMV', 'Err': ''}, … {'Ref': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTi … eXJY', 'Err': ''}] Parameters ---------- cid Path to the object(s) to list refs from Returns ------- list """ args = (str(cid),) return self._client.request('/refs', args, decoder='json', **kwargs)
@base.returns_multiple_items(base.ResponseBase) def local(self, **kwargs: base.CommonArgs): """Returns the hashes of all local objects **This API is subject to future change or removal!** .. code-block:: python >>> client.unstable.refs.local() [{'Ref': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7 … cNMV', 'Err': ''}, … {'Ref': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTi … eXJY', 'Err': ''}] Returns ------- list """ return self._client.request('/refs/local', decoder='json', **kwargs)
class Section(base.SectionBase): """Features that are subject to change and are only provided for convenience""" log = base.SectionProperty(LogSection) refs = base.SectionProperty(RefsSection)

ipfshttpclient-0.6.1.dist-info/LICENSE

The MIT License (MIT) Copyright (c) 2015 Andrew Stocker <amstocker@dons.usfca.edu> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
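# --------------------------------------------------------------------------
# Minimal usage sketch (not part of the packaged sources): ties together a
# few of the APIs documented above. It assumes a local go-ipfs daemon is
# listening on the default API address used by `ipfshttpclient.connect()`;
# the CID below is the example hash reused from the pin documentation and
# may not be resolvable on every node.
import ipfshttpclient

with ipfshttpclient.connect() as client:
    cid = "QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d"

    # Pin the object, confirm it shows up as an explicit ("recursive") pin,
    # then unpin it again.
    client.pin.add(cid)
    pinned = client.pin.ls(type="recursive")
    assert cid in pinned["Keys"]
    client.pin.rm(cid)

    # Reclaim the space without downloading the list of removed objects.
    client.repo.gc(quiet=True)

    # Basic diagnostics documented in this file.
    print(client.repo.stat()["NumObjects"])
    print(client.swarm.peers()["Strings"][:3])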