Column             Type            Length / values
nwo                stringlengths   5 to 106
sha                stringlengths   40 to 40
path               stringlengths   4 to 174
language           stringclasses   1 value
identifier         stringlengths   1 to 140
parameters         stringlengths   0 to 87.7k
argument_list      stringclasses   1 value
return_statement   stringlengths   0 to 426k
docstring          stringlengths   0 to 64.3k
docstring_summary  stringlengths   0 to 26.3k
docstring_tokens   sequence        (token list)
function           stringlengths   18 to 4.83M
function_tokens    sequence        (token list)
url                stringlengths   83 to 304
pyscf/pyscf
0adfb464333f5ceee07b664f291d4084801bae64
pyscf/gto/mole.py
python
Mole.ao2mo
(self, mo_coeffs, erifile=None, dataname='eri_mo', intor='int2e', **kwargs)
return ao2mo.kernel(self, mo_coeffs, erifile, dataname, intor, **kwargs)
Integral transformation for arbitrary orbitals and arbitrary integrals. See more detailed documentation in :func:`ao2mo.kernel`.

Args:
    mo_coeffs (an np array or a list of arrays) : A matrix of orbital
        coefficients if it is a numpy ndarray, or four sets of orbital
        coefficients, corresponding to the four indices of (ij|kl).

Kwargs:
    erifile (str or h5py File or h5py Group object) : The file/object
        to store the transformed integrals. If not given, the return
        value is an array (in memory) of the transformed integrals.
    dataname : str
        *Note* this argument is effective if erifile is given.
        The dataset name in the erifile (ref the hierarchy of HDF5 format
        http://www.hdfgroup.org/HDF5/doc1.6/UG/09_Groups.html). By assigning
        different dataname, the existing integral file can be reused. If
        the erifile contains the specified dataname, the old integrals
        will be replaced by the new ones under the key dataname.
    intor (str) : integral name
        Name of the 2-electron integral. Ref to :func:`getints_by_shell`
        for the complete list of available 2-electron integral names.

Returns:
    An array of transformed integrals if erifile is not given. Otherwise,
    return the file/fileobject if erifile is assigned.

Examples:

>>> import numpy
>>> import pyscf
>>> mol = pyscf.M(atom='O 0 0 0; H 0 1 0; H 0 0 1', basis='sto3g')
>>> mo1 = numpy.random.random((mol.nao_nr(), 10))
>>> mo2 = numpy.random.random((mol.nao_nr(), 8))
>>> eri1 = mol.ao2mo(mo1)
>>> print(eri1.shape)
(55, 55)
>>> eri1 = mol.ao2mo(mo1, compact=False)
>>> print(eri1.shape)
(100, 100)
>>> eri1 = mol.ao2mo((mo1,mo2,mo2,mo2))
>>> print(eri1.shape)
(80, 36)
>>> eri1 = mol.ao2mo((mo1,mo2,mo2,mo2), erifile='water.h5')
Integral transformation for arbitrary orbitals and arbitrary integrals. See more detailed documentation in :func:`ao2mo.kernel`.
[ "Integral", "transformation", "for", "arbitrary", "orbitals", "and", "arbitrary", "integrals", ".", "See", "more", "detalied", "documentation", "in", "func", ":", "ao2mo", ".", "kernel", "." ]
def ao2mo(self, mo_coeffs, erifile=None, dataname='eri_mo', intor='int2e', **kwargs):
    '''Integral transformation for arbitrary orbitals and arbitrary
    integrals. See more detailed documentation in :func:`ao2mo.kernel`.

    Args:
        mo_coeffs (an np array or a list of arrays) : A matrix of orbital
            coefficients if it is a numpy ndarray, or four sets of orbital
            coefficients, corresponding to the four indices of (ij|kl).

    Kwargs:
        erifile (str or h5py File or h5py Group object) : The file/object
            to store the transformed integrals. If not given, the return
            value is an array (in memory) of the transformed integrals.
        dataname : str
            *Note* this argument is effective if erifile is given.
            The dataset name in the erifile (ref the hierarchy of HDF5 format
            http://www.hdfgroup.org/HDF5/doc1.6/UG/09_Groups.html). By
            assigning different dataname, the existing integral file can be
            reused. If the erifile contains the specified dataname, the old
            integrals will be replaced by the new ones under the key dataname.
        intor (str) : integral name
            Name of the 2-electron integral. Ref to :func:`getints_by_shell`
            for the complete list of available 2-electron integral names.

    Returns:
        An array of transformed integrals if erifile is not given. Otherwise,
        return the file/fileobject if erifile is assigned.

    Examples:

    >>> import numpy
    >>> import pyscf
    >>> mol = pyscf.M(atom='O 0 0 0; H 0 1 0; H 0 0 1', basis='sto3g')
    >>> mo1 = numpy.random.random((mol.nao_nr(), 10))
    >>> mo2 = numpy.random.random((mol.nao_nr(), 8))
    >>> eri1 = mol.ao2mo(mo1)
    >>> print(eri1.shape)
    (55, 55)
    >>> eri1 = mol.ao2mo(mo1, compact=False)
    >>> print(eri1.shape)
    (100, 100)
    >>> eri1 = mol.ao2mo((mo1,mo2,mo2,mo2))
    >>> print(eri1.shape)
    (80, 36)
    >>> eri1 = mol.ao2mo((mo1,mo2,mo2,mo2), erifile='water.h5')
    '''
    from pyscf import ao2mo
    return ao2mo.kernel(self, mo_coeffs, erifile, dataname, intor, **kwargs)
[ "def", "ao2mo", "(", "self", ",", "mo_coeffs", ",", "erifile", "=", "None", ",", "dataname", "=", "'eri_mo'", ",", "intor", "=", "'int2e'", ",", "*", "*", "kwargs", ")", ":", "from", "pyscf", "import", "ao2mo", "return", "ao2mo", ".", "kernel", "(", "self", ",", "mo_coeffs", ",", "erifile", ",", "dataname", ",", "intor", ",", "*", "*", "kwargs", ")" ]
https://github.com/pyscf/pyscf/blob/0adfb464333f5ceee07b664f291d4084801bae64/pyscf/gto/mole.py#L3465-L3517
pm4py/pm4py-core
7807b09a088b02199cd0149d724d0e28793971bf
pm4py/algo/organizational_mining/sna/util.py
python
cluster_affinity_propagation
(sna_results: List[List[Any]], parameters=None)
return ret
Performs a clustering using the affinity propagation algorithm provided by Scikit Learn

Parameters
--------------
sna_results
    Values for a SNA metric
parameters
    Parameters of the algorithm

Returns
--------------
clustering
    Dictionary that contains, for each cluster that has been identified, the list of resources of the cluster
Performs a clustering using the affinity propagation algorithm provided by Scikit Learn
[ "Performs", "a", "clustering", "using", "the", "affinity", "propagation", "algorithm", "provided", "by", "Scikit", "Learn" ]
def cluster_affinity_propagation(sna_results: List[List[Any]], parameters=None) -> Dict[str, List[str]]:
    """
    Performs a clustering using the affinity propagation algorithm provided by Scikit Learn

    Parameters
    --------------
    sna_results
        Values for a SNA metric
    parameters
        Parameters of the algorithm

    Returns
    --------------
    clustering
        Dictionary that contains, for each cluster that has been identified,
        the list of resources of the cluster
    """
    from sklearn.cluster import AffinityPropagation

    if parameters is None:
        parameters = {}

    matrix = sna_results[0]
    originators = sna_results[1]

    affinity_propagation = AffinityPropagation(**parameters)
    affinity_propagation.fit(matrix)
    clusters = affinity_propagation.predict(matrix)

    ret = {}
    for i in range(len(clusters)):
        res = originators[i]
        cluster = str(clusters[i])
        if cluster not in ret:
            ret[cluster] = []
        ret[cluster].append(res)

    return ret
[ "def", "cluster_affinity_propagation", "(", "sna_results", ":", "List", "[", "List", "[", "Any", "]", "]", ",", "parameters", "=", "None", ")", "->", "Dict", "[", "str", ",", "List", "[", "str", "]", "]", ":", "from", "sklearn", ".", "cluster", "import", "AffinityPropagation", "if", "parameters", "is", "None", ":", "parameters", "=", "{", "}", "matrix", "=", "sna_results", "[", "0", "]", "originators", "=", "sna_results", "[", "1", "]", "affinity_propagation", "=", "AffinityPropagation", "(", "*", "*", "parameters", ")", "affinity_propagation", ".", "fit", "(", "matrix", ")", "clusters", "=", "affinity_propagation", ".", "predict", "(", "matrix", ")", "ret", "=", "{", "}", "for", "i", "in", "range", "(", "len", "(", "clusters", ")", ")", ":", "res", "=", "originators", "[", "i", "]", "cluster", "=", "str", "(", "clusters", "[", "i", "]", ")", "if", "cluster", "not", "in", "ret", ":", "ret", "[", "cluster", "]", "=", "[", "]", "ret", "[", "cluster", "]", ".", "append", "(", "res", ")", "return", "ret" ]
https://github.com/pm4py/pm4py-core/blob/7807b09a088b02199cd0149d724d0e28793971bf/pm4py/algo/organizational_mining/sna/util.py#L73-L109
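A minimal usage sketch of this helper, with synthetic inputs rather than values from a real SNA metric (the resource names and numbers here are illustrative only):

from pm4py.algo.organizational_mining.sna.util import cluster_affinity_propagation

# sna_results[0] is the metric matrix, sna_results[1] the resource names
sna_values = [
    [[1.0, 0.9, 0.1],
     [0.9, 1.0, 0.2],
     [0.1, 0.2, 1.0]],
    ["alice", "bob", "carol"],
]
clusters = cluster_affinity_propagation(sna_values)
print(clusters)  # e.g. {'0': ['alice', 'bob'], '1': ['carol']}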
turicas/brasil.io
f1c371fe828a090510259a5027b49e2e651936b4
utils/conversion.py
python
csv2sqlite
( input_filename, output_filename, table_name, samples=30000, batch_size=10000, encoding="utf-8", callback=None, force_types=None, )
def csv2sqlite(
    input_filename,
    output_filename,
    table_name,
    samples=30000,
    batch_size=10000,
    encoding="utf-8",
    callback=None,
    force_types=None,
):
    # Identify data types
    fobj = open_compressed(input_filename, encoding)
    reader = csv.reader(fobj)
    header = next(reader)
    data = []
    for index, row in enumerate(reader):
        row = dict(zip(header, row))
        if index == samples:
            break
        data.append(row)
    fields = rows.import_from_dicts(data, import_fields=header).fields
    if force_types is not None:
        fields.update(force_types)

    # Create lazy table object to be converted
    table = rows.Table(fields=fields)
    reader = csv.reader(open_compressed(input_filename, encoding))
    next(reader)  # skip header
    table._rows = reader

    # Export to SQLite
    rows.export_to_sqlite(table, output_filename, table_name=table_name, callback=callback, batch_size=batch_size)
[ "def", "csv2sqlite", "(", "input_filename", ",", "output_filename", ",", "table_name", ",", "samples", "=", "30000", ",", "batch_size", "=", "10000", ",", "encoding", "=", "\"utf-8\"", ",", "callback", "=", "None", ",", "force_types", "=", "None", ",", ")", ":", "# Identify data types", "fobj", "=", "open_compressed", "(", "input_filename", ",", "encoding", ")", "reader", "=", "csv", ".", "reader", "(", "fobj", ")", "header", "=", "next", "(", "reader", ")", "data", "=", "[", "]", "for", "index", ",", "row", "in", "enumerate", "(", "reader", ")", ":", "row", "=", "dict", "(", "zip", "(", "header", ",", "row", ")", ")", "if", "index", "==", "samples", ":", "break", "data", ".", "append", "(", "row", ")", "fields", "=", "rows", ".", "import_from_dicts", "(", "data", ",", "import_fields", "=", "header", ")", ".", "fields", "if", "force_types", "is", "not", "None", ":", "fields", ".", "update", "(", "force_types", ")", "# Create lazy table object to be converted", "table", "=", "rows", ".", "Table", "(", "fields", "=", "fields", ")", "reader", "=", "csv", ".", "reader", "(", "open_compressed", "(", "input_filename", ",", "encoding", ")", ")", "next", "(", "reader", ")", "# skip header", "table", ".", "_rows", "=", "reader", "# Export to SQLite", "rows", ".", "export_to_sqlite", "(", "table", ",", "output_filename", ",", "table_name", "=", "table_name", ",", "callback", "=", "callback", ",", "batch_size", "=", "batch_size", ")" ]
https://github.com/turicas/brasil.io/blob/f1c371fe828a090510259a5027b49e2e651936b4/utils/conversion.py#L21-L53
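A hedged invocation sketch: the file names are hypothetical, and open_compressed is assumed to accept plain or gzip-compressed CSV, as its name suggests:

from utils.conversion import csv2sqlite

csv2sqlite(
    "cities.csv.gz",        # hypothetical input file
    "cities.sqlite",        # output SQLite database
    table_name="cities",
    samples=30000,          # rows used for the type-inference pass
)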
hydroshare/hydroshare
7ba563b55412f283047fb3ef6da367d41dec58c6
hs_app_netCDF/serialization.py
python
NetcdfResourceMeta._read_resource_metadata
(self)
def _read_resource_metadata(self):
    super(NetcdfResourceMeta, self)._read_resource_metadata()

    print("--- NetcdfResourceMeta ---")

    # Also parse using SAX so that we can capture certain metadata elements
    # in the same order in which they appear in the RDF+XML serialization.
    SAX_parse_results = NetcdfResourceSAXHandler()
    xml.sax.parse(self.rmeta_path, SAX_parse_results)

    hsterms = rdflib.namespace.Namespace('https://www.hydroshare.org/terms/')

    # Get Variable
    if SAX_parse_results:
        # Use variables from SAX parser
        self.variables = list(SAX_parse_results.variables)
    else:
        for s, p, o in self._rmeta_graph.triples((None, hsterms.netcdfVariable, None)):
            var = NetcdfResourceMeta.Variable()
            # Get name
            name_lit = self._rmeta_graph.value(o, hsterms.name)
            if name_lit is None:
                msg = "Name for Variable was not found for resource {0}".format(self.root_uri)
                raise GenericResourceMeta.ResourceMetaException(msg)
            var.name = str(name_lit)
            # Get shape
            shape_lit = self._rmeta_graph.value(o, hsterms.shape)
            if shape_lit is None:
                msg = "Shape for Variable was not found for resource {0}".format(self.root_uri)
                raise GenericResourceMeta.ResourceMetaException(msg)
            var.shape = str(shape_lit)
            # Get type
            type_lit = self._rmeta_graph.value(o, hsterms.type)
            if type_lit is None:
                msg = "Type for Variable was not found for resource {0}".format(self.root_uri)
                raise GenericResourceMeta.ResourceMetaException(msg)
            var.type = str(type_lit)
            # Get unit
            unit_lit = self._rmeta_graph.value(o, hsterms.unit)
            if unit_lit is None:
                msg = "Unit for Variable was not found for resource {0}".format(self.root_uri)
                raise GenericResourceMeta.ResourceMetaException(msg)
            var.unit = str(unit_lit)
            # Get longName
            long_name_lit = self._rmeta_graph.value(o, hsterms.longName)
            if long_name_lit:
                var.longName = str(long_name_lit)
            # Get comment
            comment_lit = self._rmeta_graph.value(o, hsterms.comment)
            if comment_lit:
                var.comment = str(comment_lit)
            # Get missingValue
            missing_val_lit = self._rmeta_graph.value(o, hsterms.missingValue)
            if missing_val_lit:
                var.missingValue = str(missing_val_lit)
            self.variables.append(var)

    for v in self.variables:
        print("\t\t{0}".format(str(v)))

    # Get spatialReference
    for s, p, o in self._rmeta_graph.triples((None, hsterms.spatialReference, None)):
        # Get extent
        extent_lit = self._rmeta_graph.value(o, hsterms.extent)
        if extent_lit is None:
            msg = "Extent not found in spatial reference for resource {0}".format(self.root_uri)
            raise GenericResourceMeta.ResourceMetaException(msg)
        extent = str(extent_lit)
        # Get crsName
        crs_name_lit = self._rmeta_graph.value(o, hsterms.crsName)
        crs_name = None
        if crs_name_lit is not None:
            crs_name = str(crs_name_lit)
        # Get crsRepresentationText
        crs_repr_text_lit = self._rmeta_graph.value(o, hsterms.crsRepresentationText)
        crs_repr_text = None
        if crs_repr_text_lit is not None:
            crs_repr_text = str(crs_repr_text_lit)
        # Get crsRepresentationType
        crs_repr_type_lit = self._rmeta_graph.value(o, hsterms.crsRepresentationType)
        crs_repr_type = None
        if crs_repr_type_lit is not None:
            crs_repr_type = str(crs_repr_type_lit)
        self.spatial_reference = NetcdfResourceMeta.SpatialReference(extent, crs_name, crs_repr_text, crs_repr_type)
        print("\t\t{0}".format(self.spatial_reference))
[ "def", "_read_resource_metadata", "(", "self", ")", ":", "super", "(", "NetcdfResourceMeta", ",", "self", ")", ".", "_read_resource_metadata", "(", ")", "print", "(", "\"--- NetcdfResourceMeta ---\"", ")", "# Also parse using SAX so that we can capture certain metadata elements", "# in the same order in which they appear in the RDF+XML serialization.", "SAX_parse_results", "=", "NetcdfResourceSAXHandler", "(", ")", "xml", ".", "sax", ".", "parse", "(", "self", ".", "rmeta_path", ",", "SAX_parse_results", ")", "hsterms", "=", "rdflib", ".", "namespace", ".", "Namespace", "(", "'https://www.hydroshare.org/terms/'", ")", "# Get Variable", "if", "SAX_parse_results", ":", "# Use variables from SAX parser", "self", ".", "variables", "=", "list", "(", "SAX_parse_results", ".", "variables", ")", "else", ":", "for", "s", ",", "p", ",", "o", "in", "self", ".", "_rmeta_graph", ".", "triples", "(", "(", "None", ",", "hsterms", ".", "netcdfVariable", ",", "None", ")", ")", ":", "var", "=", "NetcdfResourceMeta", ".", "Variable", "(", ")", "# Get name", "name_lit", "=", "self", ".", "_rmeta_graph", ".", "value", "(", "o", ",", "hsterms", ".", "name", ")", "if", "name_lit", "is", "None", ":", "msg", "=", "\"Name for Variable was not found for resource {0}\"", ".", "format", "(", "self", ".", "root_uri", ")", "raise", "GenericResourceMeta", ".", "ResourceMetaException", "(", "msg", ")", "var", ".", "name", "=", "str", "(", "name_lit", ")", "# Get shape", "shape_lit", "=", "self", ".", "_rmeta_graph", ".", "value", "(", "o", ",", "hsterms", ".", "shape", ")", "if", "shape_lit", "is", "None", ":", "msg", "=", "\"Shape for Variable was not found for resource {0}\"", ".", "format", "(", "self", ".", "root_uri", ")", "raise", "GenericResourceMeta", ".", "ResourceMetaException", "(", "msg", ")", "var", ".", "shape", "=", "str", "(", "shape_lit", ")", "# Get type", "type_lit", "=", "self", ".", "_rmeta_graph", ".", "value", "(", "o", ",", "hsterms", ".", "type", ")", "if", "type_lit", "is", "None", ":", "msg", "=", "\"Type for Variable was not found for resource {0}\"", ".", "format", "(", "self", ".", "root_uri", ")", "raise", "GenericResourceMeta", ".", "ResourceMetaException", "(", "msg", ")", "var", ".", "type", "=", "str", "(", "type_lit", ")", "# Get unit", "unit_lit", "=", "self", ".", "_rmeta_graph", ".", "value", "(", "o", ",", "hsterms", ".", "unit", ")", "if", "unit_lit", "is", "None", ":", "msg", "=", "\"Unit for Variable was not found for resource {0}\"", ".", "format", "(", "self", ".", "root_uri", ")", "raise", "GenericResourceMeta", ".", "ResourceMetaException", "(", "msg", ")", "var", ".", "unit", "=", "str", "(", "unit_lit", ")", "# Get longName", "long_name_lit", "=", "self", ".", "_rmeta_graph", ".", "value", "(", "o", ",", "hsterms", ".", "longName", ")", "if", "long_name_lit", ":", "var", ".", "longName", "=", "str", "(", "long_name_lit", ")", "# Get comment", "comment_lit", "=", "self", ".", "_rmeta_graph", ".", "value", "(", "o", ",", "hsterms", ".", "comment", ")", "if", "comment_lit", ":", "var", ".", "comment", "=", "str", "(", "comment_lit", ")", "# Get missingValue", "missing_val_lit", "=", "self", ".", "_rmeta_graph", ".", "value", "(", "o", ",", "hsterms", ".", "missingValue", ")", "if", "missing_val_lit", ":", "var", ".", "missingValue", "=", "str", "(", "missing_val_lit", ")", "self", ".", "variables", ".", "append", "(", "var", ")", "for", "v", "in", "self", ".", "variables", ":", "print", "(", "\"\\t\\t{0}\"", ".", "format", "(", "str", "(", "v", ")", ")", ")", "# Get 
spatialReference", "for", "s", ",", "p", ",", "o", "in", "self", ".", "_rmeta_graph", ".", "triples", "(", "(", "None", ",", "hsterms", ".", "spatialReference", ",", "None", ")", ")", ":", "# Get extent", "extent_lit", "=", "self", ".", "_rmeta_graph", ".", "value", "(", "o", ",", "hsterms", ".", "extent", ")", "if", "extent_lit", "is", "None", ":", "msg", "=", "\"Extent not found in spatial reference for resource {0}\"", ".", "format", "(", "self", ".", "root_uri", ")", "raise", "GenericResourceMeta", ".", "ResourceMetaException", "(", "msg", ")", "extent", "=", "str", "(", "extent_lit", ")", "# Get crsName", "crs_name_lit", "=", "self", ".", "_rmeta_graph", ".", "value", "(", "o", ",", "hsterms", ".", "crsName", ")", "crs_name", "=", "None", "if", "crs_name_lit", "is", "not", "None", ":", "crs_name", "=", "str", "(", "crs_name_lit", ")", "# Get crsRepresentationText", "crs_repr_text_lit", "=", "self", ".", "_rmeta_graph", ".", "value", "(", "o", ",", "hsterms", ".", "crsRepresentationText", ")", "crs_repr_text", "=", "None", "if", "crs_repr_text_lit", "is", "not", "None", ":", "crs_repr_text", "=", "str", "(", "crs_repr_text_lit", ")", "# Get crsRepresentationType", "crs_repr_type_lit", "=", "self", ".", "_rmeta_graph", ".", "value", "(", "o", ",", "hsterms", ".", "crsRepresentationType", ")", "crs_repr_type", "=", "None", "if", "crs_repr_type_lit", "is", "not", "None", ":", "crs_repr_type", "=", "str", "(", "crs_repr_type_lit", ")", "self", ".", "spatial_reference", "=", "NetcdfResourceMeta", ".", "SpatialReference", "(", "extent", ",", "crs_name", ",", "crs_repr_text", ",", "crs_repr_type", ")", "print", "(", "\"\\t\\t{0}\"", ".", "format", "(", "self", ".", "spatial_reference", ")", ")" ]
https://github.com/hydroshare/hydroshare/blob/7ba563b55412f283047fb3ef6da367d41dec58c6/hs_app_netCDF/serialization.py#L28-L112
Pymol-Scripts/Pymol-script-repo
bcd7bb7812dc6db1595953dfa4471fa15fb68c77
modules/pdb2pqr/contrib/numpy-1.1.0/numpy/oldnumeric/ma.py
python
resize
(a, new_shape)
return result
resize(a, new_shape) returns a new array with the specified shape. The original array's total size can be any size.
resize(a, new_shape) returns a new array with the specified shape. The original array's total size can be any size.
[ "resize", "(", "a", "new_shape", ")", "returns", "a", "new", "array", "with", "the", "specified", "shape", ".", "The", "original", "array", "s", "total", "size", "can", "be", "any", "size", "." ]
def resize(a, new_shape):
    """resize(a, new_shape) returns a new array with the specified shape.
    The original array's total size can be any size."""
    m = getmask(a)
    if m is not nomask:
        m = fromnumeric.resize(m, new_shape)
    result = array(fromnumeric.resize(filled(a), new_shape), mask=m)
    result.set_fill_value(get_fill_value(a))
    return result
[ "def", "resize", "(", "a", ",", "new_shape", ")", ":", "m", "=", "getmask", "(", "a", ")", "if", "m", "is", "not", "nomask", ":", "m", "=", "fromnumeric", ".", "resize", "(", "m", ",", "new_shape", ")", "result", "=", "array", "(", "fromnumeric", ".", "resize", "(", "filled", "(", "a", ")", ",", "new_shape", ")", ",", "mask", "=", "m", ")", "result", ".", "set_fill_value", "(", "get_fill_value", "(", "a", ")", ")", "return", "result" ]
https://github.com/Pymol-Scripts/Pymol-script-repo/blob/bcd7bb7812dc6db1595953dfa4471fa15fb68c77/modules/pdb2pqr/contrib/numpy-1.1.0/numpy/oldnumeric/ma.py#L1505-L1513
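This module is the legacy numpy.oldnumeric API bundled with numpy 1.1.0; the modern numpy.ma equivalent behaves analogously, resizing mask and data together, as this short sketch shows:

import numpy as np

a = np.ma.array([1, 2, 3, 4], mask=[0, 1, 0, 0])
b = np.ma.resize(a, (2, 4))      # total size may differ from the original
print(b.shape)                   # (2, 4); data and mask repeat to fill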
9miao/G-Firefly
8fbeeb3ef9782600560be48228c91cfb8f5ff87d
gfirefly/gfirefly/distributed/root.py
python
PBRoot.callChildNotForResult
(self,childname,*args,**kw)
Call an interface on a child node. @param childname: str, the name of the child node @return: None (the call does not wait for a result)
Call an interface on a child node.
[ "调用子节点的接口" ]
def callChildNotForResult(self, childname, *args, **kw):
    '''Call an interface on a child node.
    @param childname: str, the name of the child node
    @return: None (the call does not wait for a result)
    '''
    self.childsmanager.callChildNotForResult(childname, *args, **kw)
[ "def", "callChildNotForResult", "(", "self", ",", "childname", ",", "*", "args", ",", "*", "*", "kw", ")", ":", "self", ".", "childsmanager", ".", "callChildNotForResult", "(", "childname", ",", "*", "args", ",", "*", "*", "kw", ")" ]
https://github.com/9miao/G-Firefly/blob/8fbeeb3ef9782600560be48228c91cfb8f5ff87d/gfirefly/gfirefly/distributed/root.py#L112-L117
biolab/orange3
41685e1c7b1d1babe680113685a2d44bcc9fec0b
Orange/clustering/hierarchical.py
python
feature_clustering
(data, distance=PearsonR, linkage=AVERAGE)
return dist_matrix_clustering(matrix, linkage=linkage)
Return the hierarchical clustering of the dataset's columns.

:param Orange.data.Table data: Dataset to cluster.
:param Orange.distance.Distance distance: A distance measure.
:param str linkage:
Return the hierarchical clustering of the dataset's columns.
[ "Return", "the", "hierarchical", "clustering", "of", "the", "dataset", "s", "columns", "." ]
def feature_clustering(data, distance=PearsonR, linkage=AVERAGE):
    """
    Return the hierarchical clustering of the dataset's columns.

    :param Orange.data.Table data: Dataset to cluster.
    :param Orange.distance.Distance distance: A distance measure.
    :param str linkage:
    """
    matrix = distance(data, axis=0)
    return dist_matrix_clustering(matrix, linkage=linkage)
[ "def", "feature_clustering", "(", "data", ",", "distance", "=", "PearsonR", ",", "linkage", "=", "AVERAGE", ")", ":", "matrix", "=", "distance", "(", "data", ",", "axis", "=", "0", ")", "return", "dist_matrix_clustering", "(", "matrix", ",", "linkage", "=", "linkage", ")" ]
https://github.com/biolab/orange3/blob/41685e1c7b1d1babe680113685a2d44bcc9fec0b/Orange/clustering/hierarchical.py#L74-L84
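A short usage sketch, assuming an Orange3 installation; the iris dataset ships with Orange, and the call clusters its four attribute columns with the defaults shown above:

from Orange.data import Table
from Orange.clustering.hierarchical import feature_clustering

data = Table("iris")
root = feature_clustering(data)  # root of the hierarchical cluster tree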
sefakilic/goodreads
5187100d66b87c87db8b095bbf576e1b92422a23
goodreads/client.py
python
GoodreadsClient.find_author
(self, author_name)
return self.author(resp['author']['@id']) if 'author' in resp else None
Find an author by name
Find an author by name
[ "Find", "an", "author", "by", "name" ]
def find_author(self, author_name):
    """Find an author by name"""
    resp = self.request("api/author_url/%s" % author_name, {})
    return self.author(resp['author']['@id']) if 'author' in resp else None
[ "def", "find_author", "(", "self", ",", "author_name", ")", ":", "resp", "=", "self", ".", "request", "(", "\"api/author_url/%s\"", "%", "author_name", ",", "{", "}", ")", "return", "self", ".", "author", "(", "resp", "[", "'author'", "]", "[", "'@id'", "]", ")", "if", "'author'", "in", "resp", "else", "None" ]
https://github.com/sefakilic/goodreads/blob/5187100d66b87c87db8b095bbf576e1b92422a23/goodreads/client.py#L83-L86
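A hedged usage sketch; GoodreadsClient needs developer credentials, and the Goodreads API itself has since been retired, so treat this as illustrative only:

from goodreads import client

gc = client.GoodreadsClient("<api_key>", "<api_secret>")
author = gc.find_author("Richard Feynman")
print(author)  # a GoodreadsAuthor, or None if no 'author' key is returned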
AppScale/gts
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
AdminServer/appscale/admin/utils.py
python
remove_old_archives
(project_id, service_id, version)
Cleans up old revision archives.

Args:
    project_id: A string specifying a project ID.
    service_id: A string specifying a service ID.
    version: A dictionary containing version details.
Cleans up old revision archives.
[ "Cleans", "up", "old", "revision", "archives", "." ]
def remove_old_archives(project_id, service_id, version):
    """ Cleans up old revision archives.

    Args:
        project_id: A string specifying a project ID.
        service_id: A string specifying a service ID.
        version: A dictionary containing version details.
    """
    prefix = VERSION_PATH_SEPARATOR.join(
        [project_id, service_id, version['id']])
    current_name = os.path.basename(version['deployment']['zip']['sourceUrl'])
    old_sources = [os.path.join(SOURCES_DIRECTORY, archive)
                   for archive in os.listdir(SOURCES_DIRECTORY)
                   if archive.startswith(prefix) and archive < current_name]
    for archive in old_sources:
        os.remove(archive)
[ "def", "remove_old_archives", "(", "project_id", ",", "service_id", ",", "version", ")", ":", "prefix", "=", "VERSION_PATH_SEPARATOR", ".", "join", "(", "[", "project_id", ",", "service_id", ",", "version", "[", "'id'", "]", "]", ")", "current_name", "=", "os", ".", "path", ".", "basename", "(", "version", "[", "'deployment'", "]", "[", "'zip'", "]", "[", "'sourceUrl'", "]", ")", "old_sources", "=", "[", "os", ".", "path", ".", "join", "(", "SOURCES_DIRECTORY", ",", "archive", ")", "for", "archive", "in", "os", ".", "listdir", "(", "SOURCES_DIRECTORY", ")", "if", "archive", ".", "startswith", "(", "prefix", ")", "and", "archive", "<", "current_name", "]", "for", "archive", "in", "old_sources", ":", "os", ".", "remove", "(", "archive", ")" ]
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AdminServer/appscale/admin/utils.py#L290-L305
oracle/graalpython
577e02da9755d916056184ec441c26e00b70145c
graalpython/lib-python/3/encodings/iso8859_15.py
python
IncrementalEncoder.encode
(self, input, final=False)
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
def encode(self, input, final=False):
    return codecs.charmap_encode(input, self.errors, encoding_table)[0]
[ "def", "encode", "(", "self", ",", "input", ",", "final", "=", "False", ")", ":", "return", "codecs", ".", "charmap_encode", "(", "input", ",", "self", ".", "errors", ",", "encoding_table", ")", "[", "0", "]" ]
https://github.com/oracle/graalpython/blob/577e02da9755d916056184ec441c26e00b70145c/graalpython/lib-python/3/encodings/iso8859_15.py#L18-L19
pymedusa/Medusa
1405fbb6eb8ef4d20fcca24c32ddca52b11f0f38
ext/tmdbsimple/movies.py
python
Keywords.info
(self, **kwargs)
return response
Get the details of a keyword.

Args:
    None

Returns:
    A dict representation of the JSON returned from the API.
Get the details of a keyword.
[ "Get", "the", "details", "of", "a", "keyword", "." ]
def info(self, **kwargs):
    """
    Get the details of a keyword.

    Args:
        None

    Returns:
        A dict representation of the JSON returned from the API.
    """
    path = self._get_id_path('info')

    response = self._GET(path, kwargs)
    self._set_attrs_to_values(response)
    return response
[ "def", "info", "(", "self", ",", "*", "*", "kwargs", ")", ":", "path", "=", "self", ".", "_get_id_path", "(", "'info'", ")", "response", "=", "self", ".", "_GET", "(", "path", ",", "kwargs", ")", "self", ".", "_set_attrs_to_values", "(", "response", ")", "return", "response" ]
https://github.com/pymedusa/Medusa/blob/1405fbb6eb8ef4d20fcca24c32ddca52b11f0f38/ext/tmdbsimple/movies.py#L690-L704
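A hedged usage sketch; a TMDb API key is required and the keyword id is illustrative:

import tmdbsimple as tmdb

tmdb.API_KEY = "<your-key>"
keyword = tmdb.Keywords(1721)
response = keyword.info()
print(response.get("name"))  # fields are also set as instance attributes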
wasidennis/AdaptSegNet
fca9ff0f09dab45d44bf6d26091377ac66607028
evaluate_cityscapes.py
python
main
()
Create the model and start the evaluation process.
Create the model and start the evaluation process.
[ "Create", "the", "model", "and", "start", "the", "evaluation", "process", "." ]
def main():
    """Create the model and start the evaluation process."""
    args = get_arguments()

    gpu0 = args.gpu

    if not os.path.exists(args.save):
        os.makedirs(args.save)

    if args.model == 'DeeplabMulti':
        model = DeeplabMulti(num_classes=args.num_classes)
    elif args.model == 'Oracle':
        model = Res_Deeplab(num_classes=args.num_classes)
        if args.restore_from == RESTORE_FROM:
            args.restore_from = RESTORE_FROM_ORC
    elif args.model == 'DeeplabVGG':
        model = DeeplabVGG(num_classes=args.num_classes)
        if args.restore_from == RESTORE_FROM:
            args.restore_from = RESTORE_FROM_VGG

    if args.restore_from[:4] == 'http':
        saved_state_dict = model_zoo.load_url(args.restore_from)
    else:
        saved_state_dict = torch.load(args.restore_from)

    # for running different versions of pytorch
    model_dict = model.state_dict()
    saved_state_dict = {k: v for k, v in saved_state_dict.items() if k in model_dict}
    model_dict.update(saved_state_dict)
    model.load_state_dict(saved_state_dict)

    model.eval()
    model.cuda(gpu0)

    testloader = data.DataLoader(cityscapesDataSet(args.data_dir, args.data_list,
                                                   crop_size=(1024, 512), mean=IMG_MEAN,
                                                   scale=False, mirror=False, set=args.set),
                                 batch_size=1, shuffle=False, pin_memory=True)

    if version.parse(torch.__version__) >= version.parse('0.4.0'):
        interp = nn.Upsample(size=(1024, 2048), mode='bilinear', align_corners=True)
    else:
        interp = nn.Upsample(size=(1024, 2048), mode='bilinear')

    for index, batch in enumerate(testloader):
        if index % 100 == 0:
            print('%d processed' % index)
        image, _, name = batch
        if args.model == 'DeeplabMulti':
            output1, output2 = model(Variable(image, volatile=True).cuda(gpu0))
            output = interp(output2).cpu().data[0].numpy()
        elif args.model == 'DeeplabVGG' or args.model == 'Oracle':
            output = model(Variable(image, volatile=True).cuda(gpu0))
            output = interp(output).cpu().data[0].numpy()

        output = output.transpose(1, 2, 0)
        output = np.asarray(np.argmax(output, axis=2), dtype=np.uint8)

        output_col = colorize_mask(output)
        output = Image.fromarray(output)

        name = name[0].split('/')[-1]
        output.save('%s/%s' % (args.save, name))
        output_col.save('%s/%s_color.png' % (args.save, name.split('.')[0]))
[ "def", "main", "(", ")", ":", "args", "=", "get_arguments", "(", ")", "gpu0", "=", "args", ".", "gpu", "if", "not", "os", ".", "path", ".", "exists", "(", "args", ".", "save", ")", ":", "os", ".", "makedirs", "(", "args", ".", "save", ")", "if", "args", ".", "model", "==", "'DeeplabMulti'", ":", "model", "=", "DeeplabMulti", "(", "num_classes", "=", "args", ".", "num_classes", ")", "elif", "args", ".", "model", "==", "'Oracle'", ":", "model", "=", "Res_Deeplab", "(", "num_classes", "=", "args", ".", "num_classes", ")", "if", "args", ".", "restore_from", "==", "RESTORE_FROM", ":", "args", ".", "restore_from", "=", "RESTORE_FROM_ORC", "elif", "args", ".", "model", "==", "'DeeplabVGG'", ":", "model", "=", "DeeplabVGG", "(", "num_classes", "=", "args", ".", "num_classes", ")", "if", "args", ".", "restore_from", "==", "RESTORE_FROM", ":", "args", ".", "restore_from", "=", "RESTORE_FROM_VGG", "if", "args", ".", "restore_from", "[", ":", "4", "]", "==", "'http'", ":", "saved_state_dict", "=", "model_zoo", ".", "load_url", "(", "args", ".", "restore_from", ")", "else", ":", "saved_state_dict", "=", "torch", ".", "load", "(", "args", ".", "restore_from", ")", "### for running different versions of pytorch", "model_dict", "=", "model", ".", "state_dict", "(", ")", "saved_state_dict", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "saved_state_dict", ".", "items", "(", ")", "if", "k", "in", "model_dict", "}", "model_dict", ".", "update", "(", "saved_state_dict", ")", "###", "model", ".", "load_state_dict", "(", "saved_state_dict", ")", "model", ".", "eval", "(", ")", "model", ".", "cuda", "(", "gpu0", ")", "testloader", "=", "data", ".", "DataLoader", "(", "cityscapesDataSet", "(", "args", ".", "data_dir", ",", "args", ".", "data_list", ",", "crop_size", "=", "(", "1024", ",", "512", ")", ",", "mean", "=", "IMG_MEAN", ",", "scale", "=", "False", ",", "mirror", "=", "False", ",", "set", "=", "args", ".", "set", ")", ",", "batch_size", "=", "1", ",", "shuffle", "=", "False", ",", "pin_memory", "=", "True", ")", "if", "version", ".", "parse", "(", "torch", ".", "__version__", ")", ">=", "version", ".", "parse", "(", "'0.4.0'", ")", ":", "interp", "=", "nn", ".", "Upsample", "(", "size", "=", "(", "1024", ",", "2048", ")", ",", "mode", "=", "'bilinear'", ",", "align_corners", "=", "True", ")", "else", ":", "interp", "=", "nn", ".", "Upsample", "(", "size", "=", "(", "1024", ",", "2048", ")", ",", "mode", "=", "'bilinear'", ")", "for", "index", ",", "batch", "in", "enumerate", "(", "testloader", ")", ":", "if", "index", "%", "100", "==", "0", ":", "print", "'%d processd'", "%", "index", "image", ",", "_", ",", "name", "=", "batch", "if", "args", ".", "model", "==", "'DeeplabMulti'", ":", "output1", ",", "output2", "=", "model", "(", "Variable", "(", "image", ",", "volatile", "=", "True", ")", ".", "cuda", "(", "gpu0", ")", ")", "output", "=", "interp", "(", "output2", ")", ".", "cpu", "(", ")", ".", "data", "[", "0", "]", ".", "numpy", "(", ")", "elif", "args", ".", "model", "==", "'DeeplabVGG'", "or", "args", ".", "model", "==", "'Oracle'", ":", "output", "=", "model", "(", "Variable", "(", "image", ",", "volatile", "=", "True", ")", ".", "cuda", "(", "gpu0", ")", ")", "output", "=", "interp", "(", "output", ")", ".", "cpu", "(", ")", ".", "data", "[", "0", "]", ".", "numpy", "(", ")", "output", "=", "output", ".", "transpose", "(", "1", ",", "2", ",", "0", ")", "output", "=", "np", ".", "asarray", "(", "np", ".", "argmax", "(", "output", ",", "axis", "=", "2", ")", ",", "dtype", "=", "np", ".", 
"uint8", ")", "output_col", "=", "colorize_mask", "(", "output", ")", "output", "=", "Image", ".", "fromarray", "(", "output", ")", "name", "=", "name", "[", "0", "]", ".", "split", "(", "'/'", ")", "[", "-", "1", "]", "output", ".", "save", "(", "'%s/%s'", "%", "(", "args", ".", "save", ",", "name", ")", ")", "output_col", ".", "save", "(", "'%s/%s_color.png'", "%", "(", "args", ".", "save", ",", "name", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", ")" ]
https://github.com/wasidennis/AdaptSegNet/blob/fca9ff0f09dab45d44bf6d26091377ac66607028/evaluate_cityscapes.py#L82-L145
home-assistant/core
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
homeassistant/util/yaml/loader.py
python
Secrets._load_secret_yaml
(self, secret_dir: Path)
return secrets
Load the secrets yaml from path.
Load the secrets yaml from path.
[ "Load", "the", "secrets", "yaml", "from", "path", "." ]
def _load_secret_yaml(self, secret_dir: Path) -> dict[str, str]:
    """Load the secrets yaml from path."""
    if (secret_path := secret_dir / SECRET_YAML) in self._cache:
        return self._cache[secret_path]

    _LOGGER.debug("Loading %s", secret_path)
    try:
        secrets = load_yaml(str(secret_path))
        if not isinstance(secrets, dict):
            raise HomeAssistantError("Secrets is not a dictionary")
        if "logger" in secrets:
            logger = str(secrets["logger"]).lower()
            if logger == "debug":
                _LOGGER.setLevel(logging.DEBUG)
            else:
                _LOGGER.error(
                    "Error in secrets.yaml: 'logger: debug' expected, but 'logger: %s' found",
                    logger,
                )
            del secrets["logger"]
    except FileNotFoundError:
        secrets = {}

    self._cache[secret_path] = secrets
    return secrets
[ "def", "_load_secret_yaml", "(", "self", ",", "secret_dir", ":", "Path", ")", "->", "dict", "[", "str", ",", "str", "]", ":", "if", "(", "secret_path", ":=", "secret_dir", "/", "SECRET_YAML", ")", "in", "self", ".", "_cache", ":", "return", "self", ".", "_cache", "[", "secret_path", "]", "_LOGGER", ".", "debug", "(", "\"Loading %s\"", ",", "secret_path", ")", "try", ":", "secrets", "=", "load_yaml", "(", "str", "(", "secret_path", ")", ")", "if", "not", "isinstance", "(", "secrets", ",", "dict", ")", ":", "raise", "HomeAssistantError", "(", "\"Secrets is not a dictionary\"", ")", "if", "\"logger\"", "in", "secrets", ":", "logger", "=", "str", "(", "secrets", "[", "\"logger\"", "]", ")", ".", "lower", "(", ")", "if", "logger", "==", "\"debug\"", ":", "_LOGGER", ".", "setLevel", "(", "logging", ".", "DEBUG", ")", "else", ":", "_LOGGER", ".", "error", "(", "\"Error in secrets.yaml: 'logger: debug' expected, but 'logger: %s' found\"", ",", "logger", ",", ")", "del", "secrets", "[", "\"logger\"", "]", "except", "FileNotFoundError", ":", "secrets", "=", "{", "}", "self", ".", "_cache", "[", "secret_path", "]", "=", "secrets", "return", "secrets" ]
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/util/yaml/loader.py#L61-L88
securesystemslab/zippy
ff0e84ac99442c2c55fe1d285332cfd4e185e089
zippy/benchmarks/src/benchmarks/sympy/sympy/physics/quantum/boson.py
python
BosonOp._eval_commutator_BosonOp
(self, other, **hints)
return None
def _eval_commutator_BosonOp(self, other, **hints):
    if self.name == other.name:
        # [a^\dagger, a] = -1
        if not self.is_annihilation and other.is_annihilation:
            return Integer(-1)
    elif 'independent' in hints and hints['independent']:
        # [a, b] = 0
        return Integer(0)
    return None
[ "def", "_eval_commutator_BosonOp", "(", "self", ",", "other", ",", "*", "*", "hints", ")", ":", "if", "self", ".", "name", "==", "other", ".", "name", ":", "# [a^\\dagger, a] = -1", "if", "not", "self", ".", "is_annihilation", "and", "other", ".", "is_annihilation", ":", "return", "Integer", "(", "-", "1", ")", "elif", "'independent'", "in", "hints", "and", "hints", "[", "'independent'", "]", ":", "# [a, b] = 0", "return", "Integer", "(", "0", ")", "return", "None" ]
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/physics/quantum/boson.py#L68-L78
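The rule encoded above can be checked through sympy's public API; this sketch assumes a standard sympy installation:

from sympy.physics.quantum import Commutator, Dagger
from sympy.physics.quantum.boson import BosonOp

a = BosonOp('a')                        # annihilation operator by default
print(Commutator(Dagger(a), a).doit())  # -1, matching the comment above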
translate/translate
72816df696b5263abfe80ab59129b299b85ae749
translate/storage/html.py
python
htmlfile.handle_charref
(self, name)
Handle entries in the form &#NNNN; e.g. &#8417;
Handle entries in the form &#NNNN; e.g. &#8417;
[ "Handle", "entries", "in", "the", "form", "&#NNNN", ";", "e", ".", "g", ".", "&#8417", ";" ]
def handle_charref(self, name):
    """Handle entries in the form &#NNNN; e.g. &#8417;"""
    if name.lower().startswith("x"):
        self.handle_data(chr(int(name[1:], 16)))
    else:
        self.handle_data(chr(int(name)))
[ "def", "handle_charref", "(", "self", ",", "name", ")", ":", "if", "name", ".", "lower", "(", ")", ".", "startswith", "(", "\"x\"", ")", ":", "self", ".", "handle_data", "(", "chr", "(", "int", "(", "name", "[", "1", ":", "]", ",", "16", ")", ")", ")", "else", ":", "self", ".", "handle_data", "(", "chr", "(", "int", "(", "name", ")", ")", ")" ]
https://github.com/translate/translate/blob/72816df696b5263abfe80ab59129b299b85ae749/translate/storage/html.py#L461-L466
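The same decimal/hexadecimal dispatch can be demonstrated with the standard library's HTMLParser; this standalone sketch mirrors the method body above:

from html.parser import HTMLParser

class CharrefDemo(HTMLParser):
    def handle_charref(self, name):
        # hex form &#xNN; vs decimal form &#NN;, as in handle_charref above
        if name.lower().startswith("x"):
            print(chr(int(name[1:], 16)))
        else:
            print(chr(int(name)))

CharrefDemo().feed("&#65;&#x42;")  # prints A, then B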
qutebrowser/qutebrowser
3a2aaaacbf97f4bf0c72463f3da94ed2822a5442
qutebrowser/api/cmdutils.py
python
check_exclusive
(flags: Iterable[bool], names: Iterable[str])
Check if only one flag is set with exclusive flags.

Raise a CommandError if not.

Args:
    flags: The flag values to check.
    names: A list of names (corresponding to the flags argument).
Check if only one flag is set with exclusive flags.
[ "Check", "if", "only", "one", "flag", "is", "set", "with", "exclusive", "flags", "." ]
def check_exclusive(flags: Iterable[bool], names: Iterable[str]) -> None:
    """Check if only one flag is set with exclusive flags.

    Raise a CommandError if not.

    Args:
        flags: The flag values to check.
        names: A list of names (corresponding to the flags argument).
    """
    if sum(1 for e in flags if e) > 1:
        argstr = '/'.join('-' + e for e in names)
        raise CommandError("Only one of {} can be given!".format(argstr))
[ "def", "check_exclusive", "(", "flags", ":", "Iterable", "[", "bool", "]", ",", "names", ":", "Iterable", "[", "str", "]", ")", "->", "None", ":", "if", "sum", "(", "1", "for", "e", "in", "flags", "if", "e", ")", ">", "1", ":", "argstr", "=", "'/'", ".", "join", "(", "'-'", "+", "e", "for", "e", "in", "names", ")", "raise", "CommandError", "(", "\"Only one of {} can be given!\"", ".", "format", "(", "argstr", ")", ")" ]
https://github.com/qutebrowser/qutebrowser/blob/3a2aaaacbf97f4bf0c72463f3da94ed2822a5442/qutebrowser/api/cmdutils.py#L94-L105
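A brief illustration of the failure path; assuming cmdutils is importable, two truthy flags trigger the CommandError built from the joined names:

from qutebrowser.api.cmdutils import CommandError, check_exclusive

try:
    check_exclusive([True, True], ['bar', 'baz'])
except CommandError as e:
    print(e)  # Only one of -bar/-baz can be given!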
FederatedAI/FATE
32540492623568ecd1afcb367360133616e02fa3
python/federatedml/ensemble/basic_algorithms/decision_tree/tree_core/g_h_optim.py
python
GHPacker.fixedpoint_encode
(num, mul)
return int_fixpoint
def fixedpoint_encode(num, mul):
    int_fixpoint = int(round(num * mul))
    return int_fixpoint
[ "def", "fixedpoint_encode", "(", "num", ",", "mul", ")", ":", "int_fixpoint", "=", "int", "(", "round", "(", "num", "*", "mul", ")", ")", "return", "int_fixpoint" ]
https://github.com/FederatedAI/FATE/blob/32540492623568ecd1afcb367360133616e02fa3/python/federatedml/ensemble/basic_algorithms/decision_tree/tree_core/g_h_optim.py#L95-L97
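A worked instance of the encoding: the float is scaled by the multiplier and rounded to an integer, and dividing by the same multiplier recovers an approximation (the multiplier value here is illustrative):

mul = 10 ** 6
encoded = int(round(3.141592 * mul))  # 3141592
decoded = encoded / mul               # 3.141592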
cloudera/impyla
0c736af4cad2bade9b8e313badc08ec50e81c948
impala/_thrift_gen/hive_metastore/ThriftHiveMetastore.py
python
get_function_result.write
(self, oprot)
def write(self, oprot):
    if oprot._fast_encode is not None and self.thrift_spec is not None:
        oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
        return
    oprot.writeStructBegin('get_function_result')
    if self.success is not None:
        oprot.writeFieldBegin('success', TType.STRUCT, 0)
        self.success.write(oprot)
        oprot.writeFieldEnd()
    if self.o1 is not None:
        oprot.writeFieldBegin('o1', TType.STRUCT, 1)
        self.o1.write(oprot)
        oprot.writeFieldEnd()
    if self.o2 is not None:
        oprot.writeFieldBegin('o2', TType.STRUCT, 2)
        self.o2.write(oprot)
        oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
[ "def", "write", "(", "self", ",", "oprot", ")", ":", "if", "oprot", ".", "_fast_encode", "is", "not", "None", "and", "self", ".", "thrift_spec", "is", "not", "None", ":", "oprot", ".", "trans", ".", "write", "(", "oprot", ".", "_fast_encode", "(", "self", ",", "[", "self", ".", "__class__", ",", "self", ".", "thrift_spec", "]", ")", ")", "return", "oprot", ".", "writeStructBegin", "(", "'get_function_result'", ")", "if", "self", ".", "success", "is", "not", "None", ":", "oprot", ".", "writeFieldBegin", "(", "'success'", ",", "TType", ".", "STRUCT", ",", "0", ")", "self", ".", "success", ".", "write", "(", "oprot", ")", "oprot", ".", "writeFieldEnd", "(", ")", "if", "self", ".", "o1", "is", "not", "None", ":", "oprot", ".", "writeFieldBegin", "(", "'o1'", ",", "TType", ".", "STRUCT", ",", "1", ")", "self", ".", "o1", ".", "write", "(", "oprot", ")", "oprot", ".", "writeFieldEnd", "(", ")", "if", "self", ".", "o2", "is", "not", "None", ":", "oprot", ".", "writeFieldBegin", "(", "'o2'", ",", "TType", ".", "STRUCT", ",", "2", ")", "self", ".", "o2", ".", "write", "(", "oprot", ")", "oprot", ".", "writeFieldEnd", "(", ")", "oprot", ".", "writeFieldStop", "(", ")", "oprot", ".", "writeStructEnd", "(", ")" ]
https://github.com/cloudera/impyla/blob/0c736af4cad2bade9b8e313badc08ec50e81c948/impala/_thrift_gen/hive_metastore/ThriftHiveMetastore.py#L28116-L28134
f-dangel/backpack
1da7e53ebb2c490e2b7dd9f79116583641f3cca1
backpack/extensions/secondorder/sqrt_ggn/dropout.py
python
SqrtGGNDropout.__init__
(self)
Pass derivatives for ``torch.nn.Dropout`` module.
Pass derivatives for ``torch.nn.Dropout`` module.
[ "Pass", "derivatives", "for", "torch", ".", "nn", ".", "Dropout", "module", "." ]
def __init__(self):
    """Pass derivatives for ``torch.nn.Dropout`` module."""
    super().__init__(DropoutDerivatives())
[ "def", "__init__", "(", "self", ")", ":", "super", "(", ")", ".", "__init__", "(", "DropoutDerivatives", "(", ")", ")" ]
https://github.com/f-dangel/backpack/blob/1da7e53ebb2c490e2b7dd9f79116583641f3cca1/backpack/extensions/secondorder/sqrt_ggn/dropout.py#L9-L11
Squarespace/pgbedrock
58e46f98da72e81ae0c2f69463aa6e97ea7b8936
pgbedrock/privileges.py
python
PrivilegeAnalyzer.analyze_defaults
(self)
Analyze default privileges. Note that we sort the grants / revokes before issuing them so the output will be more organized, making it easier for the end user to read
Analyze default privileges. Note that we sort the grants / revokes before issuing them so the output will be more organized, making it easier for the end user to read
[ "Analyze", "default", "privileges", ".", "Note", "that", "we", "sort", "the", "grants", "/", "revokes", "before", "issuing", "them", "so", "the", "output", "will", "be", "more", "organized", "making", "it", "easier", "for", "the", "end", "user", "to", "read" ]
def analyze_defaults(self):
    """ Analyze default privileges. Note that we sort the grants / revokes
    before issuing them so the output will be more organized, making it
    easier for the end user to read """
    defaults_to_grant = self.desired_defaults.difference(self.current_defaults)
    logger.debug('defaults_to_grant: {}'.format(defaults_to_grant))
    for grantor, schema, pg_priv_kind in sorted(defaults_to_grant):
        self.grant_default(grantor, schema, pg_priv_kind)

    defaults_to_revoke = self.current_defaults.difference(self.desired_defaults)
    logger.debug('defaults_to_revoke: {}'.format(defaults_to_revoke))
    for grantor, schema, pg_priv_kind in sorted(defaults_to_revoke):
        self.revoke_default(grantor, schema, pg_priv_kind)
[ "def", "analyze_defaults", "(", "self", ")", ":", "defaults_to_grant", "=", "self", ".", "desired_defaults", ".", "difference", "(", "self", ".", "current_defaults", ")", "logger", ".", "debug", "(", "'defaults_to_grant: {}'", ".", "format", "(", "defaults_to_grant", ")", ")", "for", "grantor", ",", "schema", ",", "pg_priv_kind", "in", "sorted", "(", "defaults_to_grant", ")", ":", "self", ".", "grant_default", "(", "grantor", ",", "schema", ",", "pg_priv_kind", ")", "defaults_to_revoke", "=", "self", ".", "current_defaults", ".", "difference", "(", "self", ".", "desired_defaults", ")", "logger", ".", "debug", "(", "'defaults_to_revoke: {}'", ".", "format", "(", "defaults_to_revoke", ")", ")", "for", "grantor", ",", "schema", ",", "pg_priv_kind", "in", "sorted", "(", "defaults_to_revoke", ")", ":", "self", ".", "revoke_default", "(", "grantor", ",", "schema", ",", "pg_priv_kind", ")" ]
https://github.com/Squarespace/pgbedrock/blob/58e46f98da72e81ae0c2f69463aa6e97ea7b8936/pgbedrock/privileges.py#L214-L225
PetterKraabol/Twitch-Chat-Downloader
7d8b00d1836cbb804489a75b57d6af131fc2cc55
tcd/logger.py
python
Logger.log
(self, message: str = '', log_type: str = Log.REGULAR, retain: bool = True)
return log
Log a message

:param message: Log message
:param log_type: Log type
:param retain: Save log to memory
:return: The Log object that was created
Log a message :param message: Log message :param log_type: Log type :param retain: Save log to memory :return: The Log object that was created
[ "Log", "a", "message", ":", "param", "message", ":", "Log", "message", ":", "param", "log_type", ":", "Log", "type", ":", "param", "retain", ":", "Save", "log", "to", "memory", ":", "return", ":", "None" ]
def log(self, message: str = '', log_type: str = Log.REGULAR, retain: bool = True) -> Log:
    """
    Log a message
    :param message: Log message
    :param log_type: Log type
    :param retain: Save log to memory
    :return: The Log object that was created
    """
    # Create the log entry
    log = Log(message, log_type)

    # Save log entry to memory
    if retain and log.type is not Log.PREVIEW:
        self.logs.append(log)

    # Save log when debugging
    if Arguments().log:
        self.save()

    # Print
    if self.should_print_type(log.type):
        print(log)

    return log
[ "def", "log", "(", "self", ",", "message", ":", "str", "=", "''", ",", "log_type", ":", "str", "=", "Log", ".", "REGULAR", ",", "retain", ":", "bool", "=", "True", ")", "->", "Log", ":", "# Add log to", "log", "=", "Log", "(", "message", ",", "log_type", ")", "# Save log entry to memory", "if", "retain", "and", "log", ".", "type", "is", "not", "Log", ".", "PREVIEW", ":", "self", ".", "logs", ".", "append", "(", "log", ")", "# Save log when debugging", "if", "Arguments", "(", ")", ".", "log", ":", "self", ".", "save", "(", ")", "# Print", "if", "self", ".", "should_print_type", "(", "log", ".", "type", ")", ":", "print", "(", "log", ")", "return", "log" ]
https://github.com/PetterKraabol/Twitch-Chat-Downloader/blob/7d8b00d1836cbb804489a75b57d6af131fc2cc55/tcd/logger.py#L42-L65
Tautulli/Tautulli
2410eb33805aaac4bd1c5dad0f71e4f15afaf742
lib/dns/resolver.py
python
Cache.put
(self, key, value)
Associate key and value in the cache. *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the query name, rdtype, and rdclass respectively. *value*, a ``dns.resolver.Answer``, the answer.
Associate key and value in the cache.
[ "Associate", "key", "and", "value", "in", "the", "cache", "." ]
def put(self, key, value):
    """Associate key and value in the cache.

    *key*, a ``(dns.name.Name, int, int)`` tuple whose values are the
    query name, rdtype, and rdclass respectively.

    *value*, a ``dns.resolver.Answer``, the answer.
    """
    with self.lock:
        self._maybe_clean()
        self.data[key] = value
[ "def", "put", "(", "self", ",", "key", ",", "value", ")", ":", "with", "self", ".", "lock", ":", "self", ".", "_maybe_clean", "(", ")", "self", ".", "data", "[", "key", "]", "=", "value" ]
https://github.com/Tautulli/Tautulli/blob/2410eb33805aaac4bd1c5dad0f71e4f15afaf742/lib/dns/resolver.py#L331-L342
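A hedged usage sketch against dnspython's public API; it assumes network access for the initial lookup and a dnspython 2.x-style resolve call:

import dns.name
import dns.rdataclass
import dns.rdatatype
import dns.resolver

cache = dns.resolver.Cache()
answer = dns.resolver.resolve("example.com", "A")
key = (dns.name.from_text("example.com"), dns.rdatatype.A, dns.rdataclass.IN)
cache.put(key, answer)
print(cache.get(key) is answer)  # True until the entry expires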
dimagi/commcare-hq
d67ff1d3b4c51fa050c19e60c3253a79d3452a39
corehq/apps/locations/management/commands/fix_loc_type_reference.py
python
has_bad_location_types
(domain)
return (SQLLocation.objects .filter(domain=domain) .exclude(location_type__domain=domain) .exists())
def has_bad_location_types(domain):
    return (SQLLocation.objects
            .filter(domain=domain)
            .exclude(location_type__domain=domain)
            .exists())
[ "def", "has_bad_location_types", "(", "domain", ")", ":", "return", "(", "SQLLocation", ".", "objects", ".", "filter", "(", "domain", "=", "domain", ")", ".", "exclude", "(", "location_type__domain", "=", "domain", ")", ".", "exists", "(", ")", ")" ]
https://github.com/dimagi/commcare-hq/blob/d67ff1d3b4c51fa050c19e60c3253a79d3452a39/corehq/apps/locations/management/commands/fix_loc_type_reference.py#L57-L61
jd/tenacity
005ef22015ec4fda8646bf9dce7aaa1a4fa121d2
tenacity/__init__.py
python
retry
(*dargs: t.Any, **dkw: t.Any)
Wrap a function with a new `Retrying` object.

:param dargs: positional arguments passed to Retrying object
:param dkw: keyword arguments passed to the Retrying object
Wrap a function with a new `Retrying` object.
[ "Wrap", "a", "function", "with", "a", "new", "Retrying", "object", "." ]
def retry(*dargs: t.Any, **dkw: t.Any) -> t.Union[WrappedFn, t.Callable[[WrappedFn], WrappedFn]]:  # noqa
    """Wrap a function with a new `Retrying` object.

    :param dargs: positional arguments passed to Retrying object
    :param dkw: keyword arguments passed to the Retrying object
    """
    # support both @retry and @retry() as valid syntax
    if len(dargs) == 1 and callable(dargs[0]):
        return retry()(dargs[0])
    else:
        def wrap(f: WrappedFn) -> WrappedFn:
            if isinstance(f, retry_base):
                warnings.warn(
                    f"Got retry_base instance ({f.__class__.__name__}) as callable argument, "
                    f"this will probably hang indefinitely (did you mean retry={f.__class__.__name__}(...)?)"
                )
            if iscoroutinefunction(f):
                r: "BaseRetrying" = AsyncRetrying(*dargs, **dkw)
            elif tornado and hasattr(tornado.gen, "is_coroutine_function") and tornado.gen.is_coroutine_function(f):
                r = TornadoRetrying(*dargs, **dkw)
            else:
                r = Retrying(*dargs, **dkw)

            return r.wraps(f)

        return wrap
[ "def", "retry", "(", "*", "dargs", ":", "t", ".", "Any", ",", "*", "*", "dkw", ":", "t", ".", "Any", ")", "->", "t", ".", "Union", "[", "WrappedFn", ",", "t", ".", "Callable", "[", "[", "WrappedFn", "]", ",", "WrappedFn", "]", "]", ":", "# noqa", "# support both @retry and @retry() as valid syntax", "if", "len", "(", "dargs", ")", "==", "1", "and", "callable", "(", "dargs", "[", "0", "]", ")", ":", "return", "retry", "(", ")", "(", "dargs", "[", "0", "]", ")", "else", ":", "def", "wrap", "(", "f", ":", "WrappedFn", ")", "->", "WrappedFn", ":", "if", "isinstance", "(", "f", ",", "retry_base", ")", ":", "warnings", ".", "warn", "(", "f\"Got retry_base instance ({f.__class__.__name__}) as callable argument, \"", "f\"this will probably hang indefinitely (did you mean retry={f.__class__.__name__}(...)?)\"", ")", "if", "iscoroutinefunction", "(", "f", ")", ":", "r", ":", "\"BaseRetrying\"", "=", "AsyncRetrying", "(", "*", "dargs", ",", "*", "*", "dkw", ")", "elif", "tornado", "and", "hasattr", "(", "tornado", ".", "gen", ",", "\"is_coroutine_function\"", ")", "and", "tornado", ".", "gen", ".", "is_coroutine_function", "(", "f", ")", ":", "r", "=", "TornadoRetrying", "(", "*", "dargs", ",", "*", "*", "dkw", ")", "else", ":", "r", "=", "Retrying", "(", "*", "dargs", ",", "*", "*", "dkw", ")", "return", "r", ".", "wraps", "(", "f", ")", "return", "wrap" ]
https://github.com/jd/tenacity/blob/005ef22015ec4fda8646bf9dce7aaa1a4fa121d2/tenacity/__init__.py#L105-L131
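Both decorator spellings that the branch above accepts, as a short sketch:

from tenacity import retry, stop_after_attempt, wait_fixed

@retry  # bare form: len(dargs) == 1 and dargs[0] is callable
def fetch():
    ...

@retry(stop=stop_after_attempt(3), wait=wait_fixed(2))  # configured form
def fetch_with_policy():
    ...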
MacHu-GWU/uszipcode-project
d5ca6d7bd0544043dfc8fee3393ee17e1c96c01d
uszipcode/search.py
python
SearchEngine.by_land_area_in_sqmi
( self, lower: int = -1, upper: int = 2 ** 31, zipcode_type: ZipcodeTypeEnum = ZipcodeTypeEnum.Standard, sort_by: str = SimpleZipcode.land_area_in_sqmi.name, ascending: bool = False, returns: int = DEFAULT_LIMIT, )
return self.query( land_area_in_sqmi_lower=lower, land_area_in_sqmi_upper=upper, sort_by=sort_by, zipcode_type=zipcode_type, ascending=ascending, returns=returns, )
Search zipcode information by land area / sq miles range.
Search zipcode information by land area / sq miles range.
[ "Search", "zipcode", "information", "by", "land", "area", "/", "sq", "miles", "range", "." ]
def by_land_area_in_sqmi(
    self,
    lower: int = -1,
    upper: int = 2 ** 31,
    zipcode_type: ZipcodeTypeEnum = ZipcodeTypeEnum.Standard,
    sort_by: str = SimpleZipcode.land_area_in_sqmi.name,
    ascending: bool = False,
    returns: int = DEFAULT_LIMIT,
):
    """
    Search zipcode information by land area / sq miles range.
    """
    return self.query(
        land_area_in_sqmi_lower=lower,
        land_area_in_sqmi_upper=upper,
        sort_by=sort_by,
        zipcode_type=zipcode_type,
        ascending=ascending,
        returns=returns,
    )
[ "def", "by_land_area_in_sqmi", "(", "self", ",", "lower", ":", "int", "=", "-", "1", ",", "upper", ":", "int", "=", "2", "**", "31", ",", "zipcode_type", ":", "ZipcodeTypeEnum", "=", "ZipcodeTypeEnum", ".", "Standard", ",", "sort_by", ":", "str", "=", "SimpleZipcode", ".", "land_area_in_sqmi", ".", "name", ",", "ascending", ":", "bool", "=", "False", ",", "returns", ":", "int", "=", "DEFAULT_LIMIT", ",", ")", ":", "return", "self", ".", "query", "(", "land_area_in_sqmi_lower", "=", "lower", ",", "land_area_in_sqmi_upper", "=", "upper", ",", "sort_by", "=", "sort_by", ",", "zipcode_type", "=", "zipcode_type", ",", "ascending", "=", "ascending", ",", "returns", "=", "returns", ",", ")" ]
https://github.com/MacHu-GWU/uszipcode-project/blob/d5ca6d7bd0544043dfc8fee3393ee17e1c96c01d/uszipcode/search.py#L882-L899
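A hedged usage sketch; SearchEngine downloads its database on first use, and the bounds here are illustrative:

from uszipcode import SearchEngine

search = SearchEngine()
for z in search.by_land_area_in_sqmi(lower=50, upper=100, returns=5):
    print(z.zipcode, z.land_area_in_sqmi)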
ros/ros
93d8da32091b8b43702eab5d3202f4511dfeb7dc
tools/rosunit/src/rosunit/pmon.py
python
shutdown_process_monitor
(process_monitor)
@param process_monitor: process monitor to kill
@type process_monitor: L{ProcessMonitor}
@return: True if process_monitor was successfully shutdown. False if it
    could not be shutdown cleanly or if there is a problem with the
    process_monitor parameter. shutdown_process_monitor() does not throw
    any exceptions as this is shutdown-critical code.
@rtype: bool
def shutdown_process_monitor(process_monitor):
    """
    @param process_monitor: process monitor to kill
    @type process_monitor: L{ProcessMonitor}
    @return: True if process_monitor was successfully shutdown. False if
    it could not be shutdown cleanly or if there is a problem with the
    process_monitor parameter. shutdown_process_monitor() does not throw
    any exceptions as this is shutdown-critical code.
    @rtype: bool
    """
    try:
        if process_monitor is None or process_monitor.is_shutdown:
            return False
        process_monitor.shutdown()
        process_monitor.join(20.0)
        if process_monitor.isAlive():
            return False
        else:
            return True
    except Exception:
        return False
[ "def", "shutdown_process_monitor", "(", "process_monitor", ")", ":", "try", ":", "if", "process_monitor", "is", "None", "or", "process_monitor", ".", "is_shutdown", ":", "return", "False", "process_monitor", ".", "shutdown", "(", ")", "process_monitor", ".", "join", "(", "20.0", ")", "if", "process_monitor", ".", "isAlive", "(", ")", ":", "return", "False", "else", ":", "return", "True", "except", "Exception", ":", "return", "False" ]
https://github.com/ros/ros/blob/93d8da32091b8b43702eab5d3202f4511dfeb7dc/tools/rosunit/src/rosunit/pmon.py#L92-L114
simetenn/uncertainpy
ffb2400289743066265b9a8561cdf3b72e478a28
src/uncertainpy/features/efel_features.py
python
EfelFeatures.reference_feature
(self, time, values, info)
return time, values
An example of an Efel feature. Efel feature functions have the following
requirements, and the given parameters must either be returned by
``model.run`` or ``features.preprocess``.

Parameters
----------
time : {None, numpy.nan, array_like}
    Time values of the model. If no time values it is None or numpy.nan.
values : array_like
    Result of the model.
info : dictionary
    A dictionary with info["stimulus_start"] and info["stimulus_end"] set.

Returns
-------
time : None
    No mean Efel feature has time values, so None is returned instead.
values : array_like
    The feature results, `values`. Returns None if there are no feature
    results, and that evaluation is disregarded.

See also
--------
uncertainpy.features.Features.preprocess : The features preprocess method.
uncertainpy.models.Model.run : The model run method
An example of an Efel feature. Efel feature functions have the following requirements, and the given parameters must either be returned by ``model.run`` or ``features.preprocess``.
[ "An", "example", "of", "an", "Efel", "feature", ".", "Efel", "feature", "functions", "have", "the", "following", "requirements", "and", "the", "given", "parameters", "must", "either", "be", "returned", "by", "model", ".", "run", "or", "features", ".", "preprocess", "." ]
def reference_feature(self, time, values, info): """ An example of an Efel feature. Efel feature functions have the following requirements, and the given parameters must either be returned by ``model.run`` or ``features.preprocess``. Parameters ---------- time : {None, numpy.nan, array_like} Time values of the model. If there are no time values, it is None or numpy.nan. values : array_like Result of the model. info : dictionary A dictionary with info["stimulus_start"] and info["stimulus_end"] set. Returns ------- time : None No mean Efel feature has time values, so None is returned instead. values : array_like The feature results, `values`. Returns None if there are no feature results, in which case that evaluation is disregarded. See also -------- uncertainpy.features.Features.preprocess : The features preprocess method. uncertainpy.models.Model.run : The model run method """ # Perform feature calculations here time = None values = None return time, values
[ "def", "reference_feature", "(", "self", ",", "time", ",", "values", ",", "info", ")", ":", "# Perform feature calculations here", "time", "=", "None", "values", "=", "None", "return", "time", ",", "values" ]
https://github.com/simetenn/uncertainpy/blob/ffb2400289743066265b9a8561cdf3b72e478a28/src/uncertainpy/features/efel_features.py#L250-L284
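A hedged sketch of a feature written to the contract above; stimulus_window_mean is invented for illustration and is not part of uncertainpy or eFEL.

import numpy as np

def stimulus_window_mean(time, values, info):
    # Follows the reference_feature contract: return (time, values),
    # or (None, None) when the evaluation should be disregarded.
    if time is None or np.all(np.isnan(np.atleast_1d(time))):
        return None, None
    time = np.asarray(time)
    values = np.asarray(values)
    mask = (time >= info["stimulus_start"]) & (time <= info["stimulus_end"])
    if not mask.any():
        return None, None
    return None, values[mask].mean()

t = np.linspace(0, 100, 11)
v = np.arange(11.0)
print(stimulus_window_mean(t, v, {"stimulus_start": 20, "stimulus_end": 60}))  # (None, 4.0)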
microsoft/MPNet
081523a788c1556f28dd90cbc629810f48b083fb
pretraining/fairseq/optim/bmuf.py
python
FairseqBMUF.step
(self, closure=None)
Performs a single optimization step.
Performs a single optimization step.
[ "Performs", "a", "single", "optimization", "step", "." ]
def step(self, closure=None): """Performs a single optimization step.""" self._optimizer.step(closure) self.set_num_updates(self.get_num_updates() + 1) if self._is_warmup_end(): self._warmup_sync() elif self._is_bmuf_iter(): self._block_sync()
[ "def", "step", "(", "self", ",", "closure", "=", "None", ")", ":", "self", ".", "_optimizer", ".", "step", "(", "closure", ")", "self", ".", "set_num_updates", "(", "self", ".", "get_num_updates", "(", ")", "+", "1", ")", "if", "self", ".", "_is_warmup_end", "(", ")", ":", "self", ".", "_warmup_sync", "(", ")", "elif", "self", ".", "_is_bmuf_iter", "(", ")", ":", "self", ".", "_block_sync", "(", ")" ]
https://github.com/microsoft/MPNet/blob/081523a788c1556f28dd90cbc629810f48b083fb/pretraining/fairseq/optim/bmuf.py#L147-L154
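The step above advances an update counter and then synchronizes once at the end of warmup or periodically afterwards; a self-contained sketch of that gating (the thresholds and the exact modulo rule are illustrative assumptions, not fairseq's verbatim logic).

class SyncGate:
    # Mimics the control flow of FairseqBMUF.step: one warmup sync,
    # then a block sync every `sync_interval` updates.
    def __init__(self, warmup_updates=3, sync_interval=2):
        self.warmup_updates = warmup_updates
        self.sync_interval = sync_interval
        self.num_updates = 0

    def step(self):
        self.num_updates += 1
        if self.num_updates == self.warmup_updates:
            return "warmup_sync"
        if (self.num_updates > self.warmup_updates
                and (self.num_updates - self.warmup_updates) % self.sync_interval == 0):
            return "block_sync"
        return None

gate = SyncGate()
print([gate.step() for _ in range(8)])
# [None, None, 'warmup_sync', None, 'block_sync', None, 'block_sync', None]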
general03/flask-autoindex
424246242c9f40aeb9ac2c8c63f4d2234024256e
.eggs/Jinja2-3.0.0a1-py3.7.egg/jinja2/environment.py
python
Environment.overlay
( self, block_start_string=missing, block_end_string=missing, variable_start_string=missing, variable_end_string=missing, comment_start_string=missing, comment_end_string=missing, line_statement_prefix=missing, line_comment_prefix=missing, trim_blocks=missing, lstrip_blocks=missing, extensions=missing, optimized=missing, undefined=missing, finalize=missing, autoescape=missing, loader=missing, cache_size=missing, auto_reload=missing, bytecode_cache=missing, )
return _environment_sanity_check(rv)
Create a new overlay environment that shares all the data with the current environment except for cache and the overridden attributes. Extensions cannot be removed for an overlayed environment. An overlayed environment automatically gets all the extensions of the environment it is linked to plus optional extra extensions. Creating overlays should happen after the initial environment was set up completely. Not all attributes are truly linked, some are just copied over so modifications on the original environment may not shine through.
Create a new overlay environment that shares all the data with the current environment except for cache and the overridden attributes. Extensions cannot be removed for an overlayed environment. An overlayed environment automatically gets all the extensions of the environment it is linked to plus optional extra extensions.
[ "Create", "a", "new", "overlay", "environment", "that", "shares", "all", "the", "data", "with", "the", "current", "environment", "except", "for", "cache", "and", "the", "overridden", "attributes", ".", "Extensions", "cannot", "be", "removed", "for", "an", "overlayed", "environment", ".", "An", "overlayed", "environment", "automatically", "gets", "all", "the", "extensions", "of", "the", "environment", "it", "is", "linked", "to", "plus", "optional", "extra", "extensions", "." ]
def overlay( self, block_start_string=missing, block_end_string=missing, variable_start_string=missing, variable_end_string=missing, comment_start_string=missing, comment_end_string=missing, line_statement_prefix=missing, line_comment_prefix=missing, trim_blocks=missing, lstrip_blocks=missing, extensions=missing, optimized=missing, undefined=missing, finalize=missing, autoescape=missing, loader=missing, cache_size=missing, auto_reload=missing, bytecode_cache=missing, ): """Create a new overlay environment that shares all the data with the current environment except for cache and the overridden attributes. Extensions cannot be removed for an overlayed environment. An overlayed environment automatically gets all the extensions of the environment it is linked to plus optional extra extensions. Creating overlays should happen after the initial environment was set up completely. Not all attributes are truly linked, some are just copied over so modifications on the original environment may not shine through. """ args = dict(locals()) del args["self"], args["cache_size"], args["extensions"] rv = object.__new__(self.__class__) rv.__dict__.update(self.__dict__) rv.overlayed = True rv.linked_to = self for key, value in args.items(): if value is not missing: setattr(rv, key, value) if cache_size is not missing: rv.cache = create_cache(cache_size) else: rv.cache = copy_cache(self.cache) rv.extensions = {} for key, value in self.extensions.items(): rv.extensions[key] = value.bind(rv) if extensions is not missing: rv.extensions.update(load_extensions(rv, extensions)) return _environment_sanity_check(rv)
[ "def", "overlay", "(", "self", ",", "block_start_string", "=", "missing", ",", "block_end_string", "=", "missing", ",", "variable_start_string", "=", "missing", ",", "variable_end_string", "=", "missing", ",", "comment_start_string", "=", "missing", ",", "comment_end_string", "=", "missing", ",", "line_statement_prefix", "=", "missing", ",", "line_comment_prefix", "=", "missing", ",", "trim_blocks", "=", "missing", ",", "lstrip_blocks", "=", "missing", ",", "extensions", "=", "missing", ",", "optimized", "=", "missing", ",", "undefined", "=", "missing", ",", "finalize", "=", "missing", ",", "autoescape", "=", "missing", ",", "loader", "=", "missing", ",", "cache_size", "=", "missing", ",", "auto_reload", "=", "missing", ",", "bytecode_cache", "=", "missing", ",", ")", ":", "args", "=", "dict", "(", "locals", "(", ")", ")", "del", "args", "[", "\"self\"", "]", ",", "args", "[", "\"cache_size\"", "]", ",", "args", "[", "\"extensions\"", "]", "rv", "=", "object", ".", "__new__", "(", "self", ".", "__class__", ")", "rv", ".", "__dict__", ".", "update", "(", "self", ".", "__dict__", ")", "rv", ".", "overlayed", "=", "True", "rv", ".", "linked_to", "=", "self", "for", "key", ",", "value", "in", "args", ".", "items", "(", ")", ":", "if", "value", "is", "not", "missing", ":", "setattr", "(", "rv", ",", "key", ",", "value", ")", "if", "cache_size", "is", "not", "missing", ":", "rv", ".", "cache", "=", "create_cache", "(", "cache_size", ")", "else", ":", "rv", ".", "cache", "=", "copy_cache", "(", "self", ".", "cache", ")", "rv", ".", "extensions", "=", "{", "}", "for", "key", ",", "value", "in", "self", ".", "extensions", ".", "items", "(", ")", ":", "rv", ".", "extensions", "[", "key", "]", "=", "value", ".", "bind", "(", "rv", ")", "if", "extensions", "is", "not", "missing", ":", "rv", ".", "extensions", ".", "update", "(", "load_extensions", "(", "rv", ",", "extensions", ")", ")", "return", "_environment_sanity_check", "(", "rv", ")" ]
https://github.com/general03/flask-autoindex/blob/424246242c9f40aeb9ac2c8c63f4d2234024256e/.eggs/Jinja2-3.0.0a1-py3.7.egg/jinja2/environment.py#L375-L431
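Typical use of the overlay() shown above; this is the real Jinja2 API, with an illustrative template.

from jinja2 import Environment, DictLoader

base = Environment(loader=DictLoader({"t": "Hello {{ name }}!"}))
# Same loader and extensions, but different whitespace handling and cache size.
tweaked = base.overlay(trim_blocks=True, lstrip_blocks=True, cache_size=50)

print(tweaked.linked_to is base)                         # True
print(tweaked.get_template("t").render(name="overlay"))  # Hello overlay!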
cronyo/cronyo
cd5abab0871b68bf31b18aac934303928130a441
cronyo/vendor/urllib3/util/retry.py
python
Retry.is_exhausted
(self)
return min(retry_counts) < 0
Are we out of retries?
Are we out of retries?
[ "Are", "we", "out", "of", "retries?" ]
def is_exhausted(self): """ Are we out of retries? """ retry_counts = (self.total, self.connect, self.read, self.redirect, self.status) retry_counts = list(filter(None, retry_counts)) if not retry_counts: return False return min(retry_counts) < 0
[ "def", "is_exhausted", "(", "self", ")", ":", "retry_counts", "=", "(", "self", ".", "total", ",", "self", ".", "connect", ",", "self", ".", "read", ",", "self", ".", "redirect", ",", "self", ".", "status", ")", "retry_counts", "=", "list", "(", "filter", "(", "None", ",", "retry_counts", ")", ")", "if", "not", "retry_counts", ":", "return", "False", "return", "min", "(", "retry_counts", ")", "<", "0" ]
https://github.com/cronyo/cronyo/blob/cd5abab0871b68bf31b18aac934303928130a441/cronyo/vendor/urllib3/util/retry.py#L346-L353
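A standalone re-implementation of the check above makes the filter(None, ...) subtlety visible: counters equal to 0 are dropped along with None, so exhaustion registers only once a counter has been decremented below zero.

def is_exhausted(total=None, connect=None, read=None, redirect=None, status=None):
    retry_counts = list(filter(None, (total, connect, read, redirect, status)))
    if not retry_counts:
        return False
    return min(retry_counts) < 0

print(is_exhausted(total=2))   # False: retries remain
print(is_exhausted(total=0))   # False: 0 is filtered out, not yet negative
print(is_exhausted(total=-1))  # True: decremented past zero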
Fenixin/Minecraft-Region-Fixer
bfafd378ceb65116e4ea48cab24f1e6394051978
regionfixer_core/util.py
python
get_str_from_traceback
(ty, value, tb)
return s
Return a string from a traceback plus exception. Inputs: - ty -- Exception type - value -- value of the traceback - tb -- Traceback
Return a string from a traceback plus exception. Inputs: - ty -- Exception type - value -- value of the traceback - tb -- Traceback
[ "Return", "a", "string", "from", "a", "traceback", "plus", "exception", ".", "Inputs", ":", "-", "ty", "--", "Exception", "type", "-", "value", "--", "value", "of", "the", "traceback", "-", "tb", "--", "Traceback" ]
def get_str_from_traceback(ty, value, tb): """ Return a string from a traceback plus exception. Inputs: - ty -- Exception type - value -- value of the traceback - tb -- Traceback """ t = traceback.format_exception(ty, value, tb) s = str(ty) + "\n" for i in t: s += i return s
[ "def", "get_str_from_traceback", "(", "ty", ",", "value", ",", "tb", ")", ":", "t", "=", "traceback", ".", "format_exception", "(", "ty", ",", "value", ",", "tb", ")", "s", "=", "str", "(", "ty", ")", "+", "\"\\n\"", "for", "i", "in", "t", ":", "s", "+=", "i", "return", "s" ]
https://github.com/Fenixin/Minecraft-Region-Fixer/blob/bfafd378ceb65116e4ea48cab24f1e6394051978/regionfixer_core/util.py#L29-L43
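The same logic exercised with sys.exc_info(); note that traceback.format_exception already ends with the exception line, so the str(ty) prefix only adds the type up front.

import sys
import traceback

def get_str_from_traceback(ty, value, tb):
    s = str(ty) + "\n"
    for line in traceback.format_exception(ty, value, tb):
        s += line
    return s

try:
    1 / 0
except ZeroDivisionError:
    ty, value, tb = sys.exc_info()
    print(get_str_from_traceback(ty, value, tb))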
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_hxb2/lib/python3.5/site-packages/redis/client.py
python
int_or_none
(response)
return int(response)
[]
def int_or_none(response): if response is None: return None return int(response)
[ "def", "int_or_none", "(", "response", ")", ":", "if", "response", "is", "None", ":", "return", "None", "return", "int", "(", "response", ")" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_hxb2/lib/python3.5/site-packages/redis/client.py#L229-L232
gdraheim/docker-systemctl-replacement
9cbe1a00eb4bdac6ff05b96ca34ec9ed3d8fc06c
files/docker/systemctl.py
python
Systemctl.is_active_from
(self, conf)
return self.get_active_from(conf) == "active"
used in try-restart/other commands to check if needed.
used in try-restart/other commands to check if needed.
[ "used", "in", "try", "-", "restart", "/", "other", "commands", "to", "check", "if", "needed", "." ]
def is_active_from(self, conf): """ used in try-restart/other commands to check if needed. """ if not conf: return False return self.get_active_from(conf) == "active"
[ "def", "is_active_from", "(", "self", ",", "conf", ")", ":", "if", "not", "conf", ":", "return", "False", "return", "self", ".", "get_active_from", "(", "conf", ")", "==", "\"active\"" ]
https://github.com/gdraheim/docker-systemctl-replacement/blob/9cbe1a00eb4bdac6ff05b96ca34ec9ed3d8fc06c/files/docker/systemctl.py#L4072-L4075
orestis/pysmell
14382f377f7759a1b6505120990898dd51f175e6
pysmell/tm_dialog.py
python
item
(val)
[]
def item(val): if isinstance(val, basestring): return {"title": val} if isinstance(val, tuple): return {"title": val[0]} elif val is None: return {"separator": 1}
[ "def", "item", "(", "val", ")", ":", "if", "isinstance", "(", "val", ",", "basestring", ")", ":", "return", "{", "\"title\"", ":", "val", "}", "if", "isinstance", "(", "val", ",", "tuple", ")", ":", "return", "{", "\"title\"", ":", "val", "[", "0", "]", "}", "elif", "val", "is", "None", ":", "return", "{", "\"separator\"", ":", "1", "}" ]
https://github.com/orestis/pysmell/blob/14382f377f7759a1b6505120990898dd51f175e6/pysmell/tm_dialog.py#L22-L28
Map-A-Droid/MAD
81375b5c9ccc5ca3161eb487aa81469d40ded221
mapadroid/patcher/patch_15.py
python
Patch._execute
(self)
[]
def _execute(self): try: with open(self._application_args.mappings, 'rb') as fh: settings = json.load(fh) self.__convert_to_id(settings) with open(self._application_args.mappings, 'w') as outfile: json.dump(settings, outfile, indent=4, sort_keys=True) except IOError: pass
[ "def", "_execute", "(", "self", ")", ":", "try", ":", "with", "open", "(", "self", ".", "_application_args", ".", "mappings", ",", "'rb'", ")", "as", "fh", ":", "settings", "=", "json", ".", "load", "(", "fh", ")", "self", ".", "__convert_to_id", "(", "settings", ")", "with", "open", "(", "self", ".", "_application_args", ".", "mappings", ",", "'w'", ")", "as", "outfile", ":", "json", ".", "dump", "(", "settings", ",", "outfile", ",", "indent", "=", "4", ",", "sort_keys", "=", "True", ")", "except", "IOError", ":", "pass" ]
https://github.com/Map-A-Droid/MAD/blob/81375b5c9ccc5ca3161eb487aa81469d40ded221/mapadroid/patcher/patch_15.py#L10-L18
fonttools/fonttools
892322aaff6a89bea5927379ec06bc0da3dfb7df
Lib/fontTools/ttLib/tables/otConverters.py
python
AATLookupWithDataOffset.write
(self, writer, font, tableDict, value, repeatIndex=None)
[]
def write(self, writer, font, tableDict, value, repeatIndex=None): # We do not work with OTTableWriter sub-writers because # the offsets in our AATLookup are relative to our data # table, for which we need to provide an offset value itself. # It might have been possible to somehow make a kludge for # performing this indirect offset computation directly inside # OTTableWriter. But this would have made the internal logic # of OTTableWriter even more complex than it already is, # so we decided to roll our own offset computation for the # contents of the AATLookup and associated data table. offsetByGlyph, offsetByData, dataLen = {}, {}, 0 compiledData = [] for glyph in sorted(value, key=font.getGlyphID): subWriter = OTTableWriter() value[glyph].compile(subWriter, font) data = subWriter.getAllData() offset = offsetByData.get(data, None) if offset == None: offset = dataLen dataLen = dataLen + len(data) offsetByData[data] = offset compiledData.append(data) offsetByGlyph[glyph] = offset # For calculating the offsets to our AATLookup and data table, # we can use the regular OTTableWriter infrastructure. lookupWriter = writer.getSubWriter(offsetSize=4) lookup = AATLookup('DataOffsets', None, None, UShort) lookup.write(lookupWriter, font, tableDict, offsetByGlyph, None) dataWriter = writer.getSubWriter(offsetSize=4) writer.writeSubTable(lookupWriter) writer.writeSubTable(dataWriter) for d in compiledData: dataWriter.writeData(d)
[ "def", "write", "(", "self", ",", "writer", ",", "font", ",", "tableDict", ",", "value", ",", "repeatIndex", "=", "None", ")", ":", "# We do not work with OTTableWriter sub-writers because", "# the offsets in our AATLookup are relative to our data", "# table, for which we need to provide an offset value itself.", "# It might have been possible to somehow make a kludge for", "# performing this indirect offset computation directly inside", "# OTTableWriter. But this would have made the internal logic", "# of OTTableWriter even more complex than it already is,", "# so we decided to roll our own offset computation for the", "# contents of the AATLookup and associated data table.", "offsetByGlyph", ",", "offsetByData", ",", "dataLen", "=", "{", "}", ",", "{", "}", ",", "0", "compiledData", "=", "[", "]", "for", "glyph", "in", "sorted", "(", "value", ",", "key", "=", "font", ".", "getGlyphID", ")", ":", "subWriter", "=", "OTTableWriter", "(", ")", "value", "[", "glyph", "]", ".", "compile", "(", "subWriter", ",", "font", ")", "data", "=", "subWriter", ".", "getAllData", "(", ")", "offset", "=", "offsetByData", ".", "get", "(", "data", ",", "None", ")", "if", "offset", "==", "None", ":", "offset", "=", "dataLen", "dataLen", "=", "dataLen", "+", "len", "(", "data", ")", "offsetByData", "[", "data", "]", "=", "offset", "compiledData", ".", "append", "(", "data", ")", "offsetByGlyph", "[", "glyph", "]", "=", "offset", "# For calculating the offsets to our AATLookup and data table,", "# we can use the regular OTTableWriter infrastructure.", "lookupWriter", "=", "writer", ".", "getSubWriter", "(", "offsetSize", "=", "4", ")", "lookup", "=", "AATLookup", "(", "'DataOffsets'", ",", "None", ",", "None", ",", "UShort", ")", "lookup", ".", "write", "(", "lookupWriter", ",", "font", ",", "tableDict", ",", "offsetByGlyph", ",", "None", ")", "dataWriter", "=", "writer", ".", "getSubWriter", "(", "offsetSize", "=", "4", ")", "writer", ".", "writeSubTable", "(", "lookupWriter", ")", "writer", ".", "writeSubTable", "(", "dataWriter", ")", "for", "d", "in", "compiledData", ":", "dataWriter", ".", "writeData", "(", "d", ")" ]
https://github.com/fonttools/fonttools/blob/892322aaff6a89bea5927379ec06bc0da3dfb7df/Lib/fontTools/ttLib/tables/otConverters.py#L965-L998
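The heart of the hand-rolled offset computation described in the comments is content deduplication: identical compiled byte strings share a single offset into the packed data table. A library-free sketch of that pattern:

def dedup_offsets(blobs):
    # Assign each byte string an offset into a packed data table,
    # reusing the offset when identical data was seen before.
    offset_by_data, packed, offsets = {}, b"", []
    for data in blobs:
        offset = offset_by_data.get(data)
        if offset is None:
            offset = len(packed)
            offset_by_data[data] = offset
            packed += data
        offsets.append(offset)
    return offsets, packed

offsets, table = dedup_offsets([b"\x00\x01", b"\x02", b"\x00\x01"])
print(offsets, table)  # [0, 2, 0] b'\x00\x01\x02'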
huggingface/transformers
623b4f7c63f60cce917677ee704d6c93ee960b4b
src/transformers/utils/dummy_pt_objects.py
python
MPNetForMaskedLM.from_pretrained
(cls, *args, **kwargs)
[]
def from_pretrained(cls, *args, **kwargs): requires_backends(cls, ["torch"])
[ "def", "from_pretrained", "(", "cls", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "requires_backends", "(", "cls", ",", "[", "\"torch\"", "]", ")" ]
https://github.com/huggingface/transformers/blob/623b4f7c63f60cce917677ee704d6c93ee960b4b/src/transformers/utils/dummy_pt_objects.py#L3557-L3558
lohriialo/photoshop-scripting-python
6b97da967a5d0a45e54f7c99631b29773b923f09
api_reference/photoshop_2020.py
python
Document.MergeVisibleLayers
(self)
return self._oleobj_.InvokeTypes(1299608418, LCID, 1, (24, 0), (),)
flatten all visible layers in the document
flatten all visible layers in the document
[ "flatten", "all", "visible", "layers", "in", "the", "document" ]
def MergeVisibleLayers(self): 'flatten all visible layers in the document' return self._oleobj_.InvokeTypes(1299608418, LCID, 1, (24, 0), (),)
[ "def", "MergeVisibleLayers", "(", "self", ")", ":", "return", "self", ".", "_oleobj_", ".", "InvokeTypes", "(", "1299608418", ",", "LCID", ",", "1", ",", "(", "24", ",", "0", ")", ",", "(", ")", ",", ")" ]
https://github.com/lohriialo/photoshop-scripting-python/blob/6b97da967a5d0a45e54f7c99631b29773b923f09/api_reference/photoshop_2020.py#L1628-L1630
glutanimate/anki-addons-misc
835ad28ec0927067b7acb4cd106e9b136205a838
src/editor_field_history/editor_field_history.py
python
restoreEditorFields
(self, mode)
[]
def restoreEditorFields(self, mode): if not self.note: # catch invalid state return # Gather note info fld = self.currentField if fld is None and mode in ("history", "field"): # only necessary on anki20 tooltip("Please select a field whose last entry you want to restore.") saveChanges(self, fld) return False did = self.parentWindow.deck_chooser.selectedId() deck = self.mw.col.decks.nameOrNone(did) model = self.note.model() # Perform search if deck: query = 'deck:"%s"' % (deck) results = self.note.col.findNotes(query) if not results: tooltip("Could not find any past notes in current deck.<br>" "If you just imported a deck you might have to restart Anki.") saveChanges(self, fld) return False results.sort(reverse=True) # Get user selection if mode == "history": ret = historyRestore(self, mode, results, model, fld) else: ret = quickRestore(self, mode, results, model, fld) if ret is False: saveChanges(self, fld) return False # Save changes saveChanges(self, fld)
[ "def", "restoreEditorFields", "(", "self", ",", "mode", ")", ":", "if", "not", "self", ".", "note", ":", "# catch invalid state", "return", "# Gather note info", "fld", "=", "self", ".", "currentField", "if", "fld", "is", "None", "and", "mode", "in", "(", "\"history\"", ",", "\"field\"", ")", ":", "# only necessary on anki20", "tooltip", "(", "\"Please select a field whose last entry you want to restore.\"", ")", "saveChanges", "(", "self", ",", "fld", ")", "return", "False", "did", "=", "self", ".", "parentWindow", ".", "deck_chooser", ".", "selectedId", "(", ")", "deck", "=", "self", ".", "mw", ".", "col", ".", "decks", ".", "nameOrNone", "(", "did", ")", "model", "=", "self", ".", "note", ".", "model", "(", ")", "# Perform search", "if", "deck", ":", "query", "=", "'deck:\"%s\"'", "%", "(", "deck", ")", "results", "=", "self", ".", "note", ".", "col", ".", "findNotes", "(", "query", ")", "if", "not", "results", ":", "tooltip", "(", "\"Could not find any past notes in current deck.<br>\"", "\"If you just imported a deck you might have to restart Anki.\"", ")", "saveChanges", "(", "self", ",", "fld", ")", "return", "False", "results", ".", "sort", "(", "reverse", "=", "True", ")", "# Get user selection", "if", "mode", "==", "\"history\"", ":", "ret", "=", "historyRestore", "(", "self", ",", "mode", ",", "results", ",", "model", ",", "fld", ")", "else", ":", "ret", "=", "quickRestore", "(", "self", ",", "mode", ",", "results", ",", "model", ",", "fld", ")", "if", "ret", "is", "False", ":", "saveChanges", "(", "self", ",", "fld", ")", "return", "False", "# Save changes", "saveChanges", "(", "self", ",", "fld", ")" ]
https://github.com/glutanimate/anki-addons-misc/blob/835ad28ec0927067b7acb4cd106e9b136205a838/src/editor_field_history/editor_field_history.py#L155-L191
lazylibrarian/LazyLibrarian
ae3c14e9db9328ce81765e094ab2a14ed7155624
lib/requests/api.py
python
request
(method, url, **kwargs)
Constructs and sends a :class:`Request <Request>`. :param method: method for the new :class:`Request` object. :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) json data to send in the body of the :class:`Request`. :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers to add for the file. :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) <timeouts>` tuple. :type timeout: float or tuple :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. :type allow_redirects: bool :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. :param verify: (optional) whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to ``True``. :param stream: (optional) if ``False``, the response content will be immediately downloaded. :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. :return: :class:`Response <Response>` object :rtype: requests.Response Usage:: >>> import requests >>> req = requests.request('GET', 'http://httpbin.org/get') <Response [200]>
Constructs and sends a :class:`Request <Request>`.
[ "Constructs", "and", "sends", "a", ":", "class", ":", "Request", "<Request", ">", "." ]
def request(method, url, **kwargs): """Constructs and sends a :class:`Request <Request>`. :param method: method for the new :class:`Request` object. :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) json data to send in the body of the :class:`Request`. :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers to add for the file. :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) <timeouts>` tuple. :type timeout: float or tuple :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. :type allow_redirects: bool :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. :param verify: (optional) whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to ``True``. :param stream: (optional) if ``False``, the response content will be immediately downloaded. :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. :return: :class:`Response <Response>` object :rtype: requests.Response Usage:: >>> import requests >>> req = requests.request('GET', 'http://httpbin.org/get') <Response [200]> """ # By using the 'with' statement we are sure the session is closed, thus we # avoid leaving sockets open which can trigger a ResourceWarning in some # cases, and look like a memory leak in others. with sessions.Session() as session: return session.request(method=method, url=url, **kwargs)
[ "def", "request", "(", "method", ",", "url", ",", "*", "*", "kwargs", ")", ":", "# By using the 'with' statement we are sure the session is closed, thus we", "# avoid leaving sockets open which can trigger a ResourceWarning in some", "# cases, and look like a memory leak in others.", "with", "sessions", ".", "Session", "(", ")", "as", "session", ":", "return", "session", ".", "request", "(", "method", "=", "method", ",", "url", "=", "url", ",", "*", "*", "kwargs", ")" ]
https://github.com/lazylibrarian/LazyLibrarian/blob/ae3c14e9db9328ce81765e094ab2a14ed7155624/lib/requests/api.py#L16-L56
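A representative call exercising several of the documented keyword arguments (httpbin.org as in the docstring; needs network access to actually run).

import requests

resp = requests.request(
    "POST",
    "http://httpbin.org/post",
    json={"key": "value"},   # serialized as the JSON request body
    timeout=(3.05, 27),      # (connect timeout, read timeout) tuple
    allow_redirects=False,
)
print(resp.status_code)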
AutodeskRoboticsLab/Mimic
85447f0d346be66988303a6a054473d92f1ed6f4
mFIZ/scripts/mFIZ_extern/serial/serialutil.py
python
SerialBase.__init__
(self, port=None, baudrate=9600, bytesize=EIGHTBITS, parity=PARITY_NONE, stopbits=STOPBITS_ONE, timeout=None, xonxoff=False, rtscts=False, write_timeout=None, dsrdtr=False, inter_byte_timeout=None, **kwargs)
\ Initialize comm port object. If a "port" is given, then the port will be opened immediately. Otherwise a Serial port object in closed state is returned.
\ Initialize comm port object. If a "port" is given, then the port will be opened immediately. Otherwise a Serial port object in closed state is returned.
[ "\\", "Initialize", "comm", "port", "object", ".", "If", "a", "port", "is", "given", "then", "the", "port", "will", "be", "opened", "immediately", ".", "Otherwise", "a", "Serial", "port", "object", "in", "closed", "state", "is", "returned", "." ]
def __init__(self, port=None, baudrate=9600, bytesize=EIGHTBITS, parity=PARITY_NONE, stopbits=STOPBITS_ONE, timeout=None, xonxoff=False, rtscts=False, write_timeout=None, dsrdtr=False, inter_byte_timeout=None, **kwargs): """\ Initialize comm port object. If a "port" is given, then the port will be opened immediately. Otherwise a Serial port object in closed state is returned. """ self.is_open = False self.portstr = None self.name = None # correct values are assigned below through properties self._port = None self._baudrate = None self._bytesize = None self._parity = None self._stopbits = None self._timeout = None self._write_timeout = None self._xonxoff = None self._rtscts = None self._dsrdtr = None self._inter_byte_timeout = None self._rs485_mode = None # disabled by default self._rts_state = True self._dtr_state = True self._break_state = False # assign values using get/set methods using the properties feature self.port = port self.baudrate = baudrate self.bytesize = bytesize self.parity = parity self.stopbits = stopbits self.timeout = timeout self.write_timeout = write_timeout self.xonxoff = xonxoff self.rtscts = rtscts self.dsrdtr = dsrdtr self.inter_byte_timeout = inter_byte_timeout # watch for backward compatible kwargs if 'writeTimeout' in kwargs: self.write_timeout = kwargs.pop('writeTimeout') if 'interCharTimeout' in kwargs: self.inter_byte_timeout = kwargs.pop('interCharTimeout') if kwargs: raise ValueError('unexpected keyword arguments: {!r}'.format(kwargs)) if port is not None: self.open()
[ "def", "__init__", "(", "self", ",", "port", "=", "None", ",", "baudrate", "=", "9600", ",", "bytesize", "=", "EIGHTBITS", ",", "parity", "=", "PARITY_NONE", ",", "stopbits", "=", "STOPBITS_ONE", ",", "timeout", "=", "None", ",", "xonxoff", "=", "False", ",", "rtscts", "=", "False", ",", "write_timeout", "=", "None", ",", "dsrdtr", "=", "False", ",", "inter_byte_timeout", "=", "None", ",", "*", "*", "kwargs", ")", ":", "self", ".", "is_open", "=", "False", "self", ".", "portstr", "=", "None", "self", ".", "name", "=", "None", "# correct values are assigned below through properties", "self", ".", "_port", "=", "None", "self", ".", "_baudrate", "=", "None", "self", ".", "_bytesize", "=", "None", "self", ".", "_parity", "=", "None", "self", ".", "_stopbits", "=", "None", "self", ".", "_timeout", "=", "None", "self", ".", "_write_timeout", "=", "None", "self", ".", "_xonxoff", "=", "None", "self", ".", "_rtscts", "=", "None", "self", ".", "_dsrdtr", "=", "None", "self", ".", "_inter_byte_timeout", "=", "None", "self", ".", "_rs485_mode", "=", "None", "# disabled by default", "self", ".", "_rts_state", "=", "True", "self", ".", "_dtr_state", "=", "True", "self", ".", "_break_state", "=", "False", "# assign values using get/set methods using the properties feature", "self", ".", "port", "=", "port", "self", ".", "baudrate", "=", "baudrate", "self", ".", "bytesize", "=", "bytesize", "self", ".", "parity", "=", "parity", "self", ".", "stopbits", "=", "stopbits", "self", ".", "timeout", "=", "timeout", "self", ".", "write_timeout", "=", "write_timeout", "self", ".", "xonxoff", "=", "xonxoff", "self", ".", "rtscts", "=", "rtscts", "self", ".", "dsrdtr", "=", "dsrdtr", "self", ".", "inter_byte_timeout", "=", "inter_byte_timeout", "# watch for backward compatible kwargs", "if", "'writeTimeout'", "in", "kwargs", ":", "self", ".", "write_timeout", "=", "kwargs", ".", "pop", "(", "'writeTimeout'", ")", "if", "'interCharTimeout'", "in", "kwargs", ":", "self", ".", "inter_byte_timeout", "=", "kwargs", ".", "pop", "(", "'interCharTimeout'", ")", "if", "kwargs", ":", "raise", "ValueError", "(", "'unexpected keyword arguments: {!r}'", ".", "format", "(", "kwargs", ")", ")", "if", "port", "is", "not", "None", ":", "self", ".", "open", "(", ")" ]
https://github.com/AutodeskRoboticsLab/Mimic/blob/85447f0d346be66988303a6a054473d92f1ed6f4/mFIZ/scripts/mFIZ_extern/serial/serialutil.py#L176-L236
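The port=None path defers opening, which the constructor's final lines make explicit; '/dev/ttyUSB0' is a placeholder device name.

import serial

ser = serial.Serial(port=None, baudrate=115200, timeout=1)  # configured, not opened
print(ser.is_open)   # False until open() is called
ser.port = '/dev/ttyUSB0'
# ser.open()         # uncomment on a machine where the device exists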
ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework
cb692f527e4e819b6c228187c5702d990a180043
bin/x86/Debug/scripting_engine/Lib/site-packages/lxml/html/diff.py
python
split_words
(text)
return words
Splits some text into words. Includes trailing whitespace (one space) on each word when appropriate.
Splits some text into words. Includes trailing whitespace (one space) on each word when appropriate.
[ "Splits", "some", "text", "into", "words", ".", "Includes", "trailing", "whitespace", "(", "one", "space", ")", "on", "each", "word", "when", "appropriate", "." ]
def split_words(text): """ Splits some text into words. Includes trailing whitespace (one space) on each word when appropriate. """ if not text or not text.strip(): return [] words = [w + ' ' for w in text.strip().split()] if not end_whitespace_re.search(text): words[-1] = words[-1][:-1] return words
[ "def", "split_words", "(", "text", ")", ":", "if", "not", "text", "or", "not", "text", ".", "strip", "(", ")", ":", "return", "[", "]", "words", "=", "[", "w", "+", "' '", "for", "w", "in", "text", ".", "strip", "(", ")", ".", "split", "(", ")", "]", "if", "not", "end_whitespace_re", ".", "search", "(", "text", ")", ":", "words", "[", "-", "1", "]", "=", "words", "[", "-", "1", "]", "[", ":", "-", "1", "]", "return", "words" ]
https://github.com/ajinabraham/OWASP-Xenotix-XSS-Exploit-Framework/blob/cb692f527e4e819b6c228187c5702d990a180043/bin/x86/Debug/scripting_engine/Lib/site-packages/lxml/html/diff.py#L704-L712
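Behavior at the trailing-whitespace boundary; end_whitespace_re is re-declared here with what is assumed to be the module's pattern, so the snippet stands alone.

import re

end_whitespace_re = re.compile(r'[ \t\n\r]$')  # assumed equivalent to the module's

def split_words(text):
    if not text or not text.strip():
        return []
    words = [w + ' ' for w in text.strip().split()]
    if not end_whitespace_re.search(text):
        words[-1] = words[-1][:-1]
    return words

print(split_words("a  b c"))   # ['a ', 'b ', 'c']   no trailing space in input
print(split_words("a b c "))   # ['a ', 'b ', 'c ']  trailing space preserved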
google/jax
bebe9845a873b3203f8050395255f173ba3bbb71
jax/experimental/sparse/transform.py
python
sparsify
(f, use_tracer=False)
Experimental sparsification transform. Examples: Decorate JAX functions to make them compatible with :class:`jax.experimental.sparse.BCOO` matrices: >>> from jax.experimental import sparse >>> @sparse.sparsify ... def f(M, v): ... return 2 * M.T @ v >>> M = sparse.BCOO.fromdense(jnp.arange(12).reshape(3, 4)) >>> v = jnp.array([3, 4, 2]) >>> f(M, v) DeviceArray([ 64, 82, 100, 118], dtype=int32)
Experimental sparsification transform.
[ "Experimental", "sparsification", "transform", "." ]
def sparsify(f, use_tracer=False): """Experimental sparsification transform. Examples: Decorate JAX functions to make them compatible with :class:`jax.experimental.sparse.BCOO` matrices: >>> from jax.experimental import sparse >>> @sparse.sparsify ... def f(M, v): ... return 2 * M.T @ v >>> M = sparse.BCOO.fromdense(jnp.arange(12).reshape(3, 4)) >>> v = jnp.array([3, 4, 2]) >>> f(M, v) DeviceArray([ 64, 82, 100, 118], dtype=int32) """ if use_tracer: return _sparsify_with_tracer(f) else: return _sparsify_with_interpreter(f)
[ "def", "sparsify", "(", "f", ",", "use_tracer", "=", "False", ")", ":", "if", "use_tracer", ":", "return", "_sparsify_with_tracer", "(", "f", ")", "else", ":", "return", "_sparsify_with_interpreter", "(", "f", ")" ]
https://github.com/google/jax/blob/bebe9845a873b3203f8050395255f173ba3bbb71/jax/experimental/sparse/transform.py#L358-L382
pyscf/pyscf
0adfb464333f5ceee07b664f291d4084801bae64
pyscf/geomopt/geometric_solver.py
python
optimize
(method, assert_convergence=ASSERT_CONV, include_ghost=INCLUDE_GHOST, constraints=None, callback=None, maxsteps=100, **kwargs)
return kernel(method, assert_convergence=assert_convergence, include_ghost=include_ghost, constraints=constraints, callback=callback, maxsteps=maxsteps, **kwargs)[1]
Optimize geometry with geomeTRIC library for the given method. To adjust the convergence threshold, parameters can be set in kwargs as below: .. code-block:: python conv_params = { # They are default settings 'convergence_energy': 1e-6, # Eh 'convergence_grms': 3e-4, # Eh/Bohr 'convergence_gmax': 4.5e-4, # Eh/Bohr 'convergence_drms': 1.2e-3, # Angstrom 'convergence_dmax': 1.8e-3, # Angstrom } from pyscf import geometric_solver newmol = geometric_solver.optimize(method, **conv_params)
Optimize geometry with geomeTRIC library for the given method.
[ "Optimize", "geometry", "with", "geomeTRIC", "library", "for", "the", "given", "method", "." ]
def optimize(method, assert_convergence=ASSERT_CONV, include_ghost=INCLUDE_GHOST, constraints=None, callback=None, maxsteps=100, **kwargs): '''Optimize geometry with geomeTRIC library for the given method. To adjust the convergence threshold, parameters can be set in kwargs as below: .. code-block:: python conv_params = { # They are default settings 'convergence_energy': 1e-6, # Eh 'convergence_grms': 3e-4, # Eh/Bohr 'convergence_gmax': 4.5e-4, # Eh/Bohr 'convergence_drms': 1.2e-3, # Angstrom 'convergence_dmax': 1.8e-3, # Angstrom } from pyscf import geometric_solver newmol = geometric_solver.optimize(method, **conv_params) ''' # MRH, 07/23/2019: name all explicit kwargs for forward compatibility return kernel(method, assert_convergence=assert_convergence, include_ghost=include_ghost, constraints=constraints, callback=callback, maxsteps=maxsteps, **kwargs)[1]
[ "def", "optimize", "(", "method", ",", "assert_convergence", "=", "ASSERT_CONV", ",", "include_ghost", "=", "INCLUDE_GHOST", ",", "constraints", "=", "None", ",", "callback", "=", "None", ",", "maxsteps", "=", "100", ",", "*", "*", "kwargs", ")", ":", "# MRH, 07/23/2019: name all explicit kwargs for forward compatibility", "return", "kernel", "(", "method", ",", "assert_convergence", "=", "assert_convergence", ",", "include_ghost", "=", "include_ghost", ",", "constraints", "=", "constraints", ",", "callback", "=", "callback", ",", "maxsteps", "=", "maxsteps", ",", "*", "*", "kwargs", ")", "[", "1", "]" ]
https://github.com/pyscf/pyscf/blob/0adfb464333f5ceee07b664f291d4084801bae64/pyscf/geomopt/geometric_solver.py#L162-L183
i-pan/kaggle-rsna18
2db498fe99615d935aa676f04847d0c562fd8e46
models/RetinaNet/keras_retinanet/models/vgg.py
python
VGGBackbone.retinanet
(self, *args, **kwargs)
return vgg_retinanet(*args, backbone=self.backbone, **kwargs)
Returns a retinanet model using the correct backbone.
Returns a retinanet model using the correct backbone.
[ "Returns", "a", "retinanet", "model", "using", "the", "correct", "backbone", "." ]
def retinanet(self, *args, **kwargs): """ Returns a retinanet model using the correct backbone. """ return vgg_retinanet(*args, backbone=self.backbone, **kwargs)
[ "def", "retinanet", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "vgg_retinanet", "(", "*", "args", ",", "backbone", "=", "self", ".", "backbone", ",", "*", "*", "kwargs", ")" ]
https://github.com/i-pan/kaggle-rsna18/blob/2db498fe99615d935aa676f04847d0c562fd8e46/models/RetinaNet/keras_retinanet/models/vgg.py#L30-L33
aaPanel/BaoTa
9bb1336f31ae2893ab513af7a3efed633338c64b
class/clusterDns.py
python
aliyun.check_result
(self,req)
return result
@name Check the API response result @author hwliang<2020-10-30> @param req<requests> the response object @return mixed
[]
def check_result(self,req): ''' @name Check the API response result @author hwliang<2020-10-30> @param req<requests> the response object @return mixed ''' result = req.json() if req.status_code != 200: if result['Code'] == 'IncorrectDomainUser' or result['Code'] == 'InvalidDomainName.NoExist': return public.returnMsg(False,"这个阿里云账户下面不存在这个域名") elif result['Code'] == 'InvalidAccessKeyId.NotFound' or result['Code'] == 'SignatureDoesNotMatch': return public.returnMsg(False,"API密钥错误") else: return public.returnMsg(False,result['Message']) return result
[ "def", "check_result", "(", "self", ",", "req", ")", ":", "result", "=", "req", ".", "json", "(", ")", "if", "req", ".", "status_code", "!=", "200", ":", "if", "result", "[", "'Code'", "]", "==", "'IncorrectDomainUser'", "or", "result", "[", "'Code'", "]", "==", "'InvalidDomainName.NoExist'", ":", "return", "public", ".", "returnMsg", "(", "False", ",", "\"这个阿里云账户下面不存在这个域名\")", "", "elif", "result", "[", "'Code'", "]", "==", "'InvalidAccessKeyId.NotFound'", "or", "result", "[", "'Code'", "]", "==", "'SignatureDoesNotMatch'", ":", "return", "public", ".", "returnMsg", "(", "False", ",", "\"API密钥错误\")", "", "else", ":", "return", "public", ".", "returnMsg", "(", "False", ",", "result", "[", "'Message'", "]", ")", "return", "result" ]
https://github.com/aaPanel/BaoTa/blob/9bb1336f31ae2893ab513af7a3efed633338c64b/class/clusterDns.py#L125-L140
scrapy/scrapy
b04cfa48328d5d5749dca6f50fa34e0cfc664c89
scrapy/extensions/memusage.py
python
MemoryUsage.engine_started
(self)
[]
def engine_started(self): self.crawler.stats.set_value('memusage/startup', self.get_virtual_size()) self.tasks = [] tsk = task.LoopingCall(self.update) self.tasks.append(tsk) tsk.start(self.check_interval, now=True) if self.limit: tsk = task.LoopingCall(self._check_limit) self.tasks.append(tsk) tsk.start(self.check_interval, now=True) if self.warning: tsk = task.LoopingCall(self._check_warning) self.tasks.append(tsk) tsk.start(self.check_interval, now=True)
[ "def", "engine_started", "(", "self", ")", ":", "self", ".", "crawler", ".", "stats", ".", "set_value", "(", "'memusage/startup'", ",", "self", ".", "get_virtual_size", "(", ")", ")", "self", ".", "tasks", "=", "[", "]", "tsk", "=", "task", ".", "LoopingCall", "(", "self", ".", "update", ")", "self", ".", "tasks", ".", "append", "(", "tsk", ")", "tsk", ".", "start", "(", "self", ".", "check_interval", ",", "now", "=", "True", ")", "if", "self", ".", "limit", ":", "tsk", "=", "task", ".", "LoopingCall", "(", "self", ".", "_check_limit", ")", "self", ".", "tasks", ".", "append", "(", "tsk", ")", "tsk", ".", "start", "(", "self", ".", "check_interval", ",", "now", "=", "True", ")", "if", "self", ".", "warning", ":", "tsk", "=", "task", ".", "LoopingCall", "(", "self", ".", "_check_warning", ")", "self", ".", "tasks", ".", "append", "(", "tsk", ")", "tsk", ".", "start", "(", "self", ".", "check_interval", ",", "now", "=", "True", ")" ]
https://github.com/scrapy/scrapy/blob/b04cfa48328d5d5749dca6f50fa34e0cfc664c89/scrapy/extensions/memusage.py#L54-L67
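engine_started leans on twisted's task.LoopingCall; the same start(interval, now=True) pattern in isolation, with a short interval for demonstration.

from twisted.internet import task, reactor

def check():
    print("periodic check")

tsk = task.LoopingCall(check)
tsk.start(1.0, now=True)             # fire immediately, then every second
reactor.callLater(3.5, reactor.stop)
reactor.run()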
IdentityPython/pysaml2
6badb32d212257bd83ffcc816f9b625f68281b47
src/saml2/ws/wstrust.py
python
participants_type__primary_from_string
(xml_string)
return saml2.create_class_from_xml_string(ParticipantsType_Primary, xml_string)
[]
def participants_type__primary_from_string(xml_string): return saml2.create_class_from_xml_string(ParticipantsType_Primary, xml_string)
[ "def", "participants_type__primary_from_string", "(", "xml_string", ")", ":", "return", "saml2", ".", "create_class_from_xml_string", "(", "ParticipantsType_Primary", ",", "xml_string", ")" ]
https://github.com/IdentityPython/pysaml2/blob/6badb32d212257bd83ffcc816f9b625f68281b47/src/saml2/ws/wstrust.py#L1456-L1457
marcosfede/algorithms
1ee7c815f9d556c9cef4d4b0d21ee3a409d21629
tree/binary_tree/is_symmetric/is_symmetric.py
python
_is_symmetric
(p, q)
return _is_symmetric(p.left, q.right) and _is_symmetric(p.right, q.left)
[]
def _is_symmetric(p, q): if p is None and q is None: return True if p is None or q is None or q.val != p.val: return False return _is_symmetric(p.left, q.right) and _is_symmetric(p.right, q.left)
[ "def", "_is_symmetric", "(", "p", ",", "q", ")", ":", "if", "p", "is", "None", "and", "q", "is", "None", ":", "return", "True", "if", "p", "is", "None", "or", "q", "is", "None", "or", "q", ".", "val", "!=", "p", ".", "val", ":", "return", "False", "return", "_is_symmetric", "(", "p", ".", "left", ",", "q", ".", "right", ")", "and", "_is_symmetric", "(", "p", ".", "right", ",", "q", ".", "left", ")" ]
https://github.com/marcosfede/algorithms/blob/1ee7c815f9d556c9cef4d4b0d21ee3a409d21629/tree/binary_tree/is_symmetric/is_symmetric.py#L7-L12
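The helper presumably sits behind a public entry point that compares root.left against root.right; the Node class and wrapper below are assumptions added to make the mirror recursion runnable.

class Node:
    def __init__(self, val, left=None, right=None):
        self.val, self.left, self.right = val, left, right

def is_symmetric(root):
    return root is None or _is_symmetric(root.left, root.right)

def _is_symmetric(p, q):
    if p is None and q is None:
        return True
    if p is None or q is None or q.val != p.val:
        return False
    return _is_symmetric(p.left, q.right) and _is_symmetric(p.right, q.left)

mirror = Node(1, Node(2, Node(3)), Node(2, right=Node(3)))
print(is_symmetric(mirror))  # True: left subtree mirrors right subtree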
deeptools/deepTools
ac42d29c298c026aa0c53c9db2553087ebc86b97
deeptools/plotFingerprint.py
python
getCHANCE
(args, idx, mat)
return [pcenrich, diffenrich, CHANCEdivergence]
Compute the CHANCE p-value 1) In short, sort IP from lowest to highest, cosorting input at the same time. 2) Choose the argmax of the difference of the cumsum() of the above 3) Determine a scale factor according to the ratio at the position at step 2.
Compute the CHANCE p-value
[ "Compute", "the", "CHANCE", "p", "-", "value" ]
def getCHANCE(args, idx, mat): """ Compute the CHANCE p-value 1) In short, sort IP from lowest to highest, cosorting input at the same time. 2) Choose the argmax of the difference of the cumsum() of the above 3) Determine a scale factor according to the ratio at the position at step 2. """ # Get the index of the reference sample if args.JSDsample not in args.bamfiles: return [np.NAN, np.NAN, np.NAN] refIdx = args.bamfiles.index(args.JSDsample) if refIdx == idx: return [np.NAN, np.NAN, np.NAN] subMatrix = np.copy(mat[:, [idx, refIdx]]) subMatrix[np.isnan(subMatrix)] = 0 subMatrix = subMatrix[subMatrix[:, 0].argsort(), :] # Find the CHANCE statistic, which is the point of maximum difference cs = np.cumsum(subMatrix, axis=0) normed = cs / np.max(cs, axis=0).astype(float) csdiff = normed[:, 1] - normed[:, 0] k = np.argmax(csdiff) if csdiff[k] < 1e-6: # Don't bother with negative values return [0, 0, 0] p = normed[k, 0] # Percent enrichment in IP q = normed[k, 1] # Percent enrichment in input pcenrich = 100 * (len(csdiff) - k) / float(len(csdiff)) diffenrich = 100.0 * (q - p) # CHANCE's JS divergence with binary entropy # Its p value is a ztest of this, which is largely useless IMO M = (p + q) / 2.0 CHANCEdivergence = 0.5 * (binRelEntropy(p, M) + binRelEntropy(q, M)) CHANCEdivergence = np.sqrt(CHANCEdivergence) return [pcenrich, diffenrich, CHANCEdivergence]
[ "def", "getCHANCE", "(", "args", ",", "idx", ",", "mat", ")", ":", "# Get the index of the reference sample", "if", "args", ".", "JSDsample", "not", "in", "args", ".", "bamfiles", ":", "return", "[", "np", ".", "NAN", ",", "np", ".", "NAN", ",", "np", ".", "NAN", "]", "refIdx", "=", "args", ".", "bamfiles", ".", "index", "(", "args", ".", "JSDsample", ")", "if", "refIdx", "==", "idx", ":", "return", "[", "np", ".", "NAN", ",", "np", ".", "NAN", ",", "np", ".", "NAN", "]", "subMatrix", "=", "np", ".", "copy", "(", "mat", "[", ":", ",", "[", "idx", ",", "refIdx", "]", "]", ")", "subMatrix", "[", "np", ".", "isnan", "(", "subMatrix", ")", "]", "=", "0", "subMatrix", "=", "subMatrix", "[", "subMatrix", "[", ":", ",", "0", "]", ".", "argsort", "(", ")", ",", ":", "]", "# Find the CHANCE statistic, which is the point of maximus difference", "cs", "=", "np", ".", "cumsum", "(", "subMatrix", ",", "axis", "=", "0", ")", "normed", "=", "cs", "/", "np", ".", "max", "(", "cs", ",", "axis", "=", "0", ")", ".", "astype", "(", "float", ")", "csdiff", "=", "normed", "[", ":", ",", "1", "]", "-", "normed", "[", ":", ",", "0", "]", "k", "=", "np", ".", "argmax", "(", "csdiff", ")", "if", "csdiff", "[", "k", "]", "<", "1e-6", ":", "# Don't bother with negative values", "return", "[", "0", ",", "0", ",", "0", "]", "p", "=", "normed", "[", "k", ",", "0", "]", "# Percent enrichment in IP", "q", "=", "normed", "[", "k", ",", "1", "]", "# Percent enrichment in input", "pcenrich", "=", "100", "*", "(", "len", "(", "csdiff", ")", "-", "k", ")", "/", "float", "(", "len", "(", "csdiff", ")", ")", "diffenrich", "=", "100.0", "*", "(", "q", "-", "p", ")", "# CHANCE's JS divergence with binary entropy", "# Its p value is a ztest of this, which is largely useless IMO", "M", "=", "(", "p", "+", "q", ")", "/", "2.0", "CHANCEdivergence", "=", "0.5", "*", "(", "binRelEntropy", "(", "p", ",", "M", ")", "+", "binRelEntropy", "(", "q", ",", "M", ")", ")", "CHANCEdivergence", "=", "np", ".", "sqrt", "(", "CHANCEdivergence", ")", "return", "[", "pcenrich", ",", "diffenrich", ",", "CHANCEdivergence", "]" ]
https://github.com/deeptools/deepTools/blob/ac42d29c298c026aa0c53c9db2553087ebc86b97/deeptools/plotFingerprint.py#L195-L233
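Steps 1 and 2 of the docstring in isolation: cosort by IP coverage, normalize the cumulative sums, and take the argmax of their difference; the coverage data is synthetic.

import numpy as np

ip  = np.array([0., 1., 0., 5., 2.])  # synthetic IP coverage per bin
inp = np.array([1., 1., 1., 1., 1.])  # synthetic input coverage per bin
sub = np.column_stack([ip, inp])
sub = sub[sub[:, 0].argsort(), :]     # step 1: sort by IP, cosorting input
cs = np.cumsum(sub, axis=0)
normed = cs / np.max(cs, axis=0).astype(float)
csdiff = normed[:, 1] - normed[:, 0]
k = int(np.argmax(csdiff))            # step 2: point of maximum difference
print(k, csdiff[k])                   # 2 0.475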
FSecureLABS/Jandroid
e31d0dab58a2bfd6ed8e0a387172b8bd7c893436
libs/platform-tools/platform-tools_linux/systrace/catapult/devil/devil/android/device_utils.py
python
DeviceUtils._ClearCache
(self)
Clears all caches.
Clears all caches.
[ "Clears", "all", "caches", "." ]
def _ClearCache(self): """Clears all caches.""" for client in self._client_caches: self._client_caches[client].clear() self._cache = { # Map of packageId -> list of on-device .apk paths 'package_apk_paths': {}, # Set of packageId that were loaded from LoadCacheData and not yet # verified. 'package_apk_paths_to_verify': set(), # Map of packageId -> set of on-device .apk checksums 'package_apk_checksums': {}, # Map of property_name -> value 'getprop': {}, # Map of device_path -> [ignore_other_files, map of path->checksum] 'device_path_checksums': {}, # Location of sdcard ($EXTERNAL_STORAGE). 'external_storage': None, # Token used to detect when LoadCacheData is stale. 'token': None, 'prev_token': None, }
[ "def", "_ClearCache", "(", "self", ")", ":", "for", "client", "in", "self", ".", "_client_caches", ":", "self", ".", "_client_caches", "[", "client", "]", ".", "clear", "(", ")", "self", ".", "_cache", "=", "{", "# Map of packageId -> list of on-device .apk paths", "'package_apk_paths'", ":", "{", "}", ",", "# Set of packageId that were loaded from LoadCacheData and not yet", "# verified.", "'package_apk_paths_to_verify'", ":", "set", "(", ")", ",", "# Map of packageId -> set of on-device .apk checksums", "'package_apk_checksums'", ":", "{", "}", ",", "# Map of property_name -> value", "'getprop'", ":", "{", "}", ",", "# Map of device_path -> [ignore_other_files, map of path->checksum]", "'device_path_checksums'", ":", "{", "}", ",", "# Location of sdcard ($EXTERNAL_STORAGE).", "'external_storage'", ":", "None", ",", "# Token used to detect when LoadCacheData is stale.", "'token'", ":", "None", ",", "'prev_token'", ":", "None", ",", "}" ]
https://github.com/FSecureLABS/Jandroid/blob/e31d0dab58a2bfd6ed8e0a387172b8bd7c893436/libs/platform-tools/platform-tools_linux/systrace/catapult/devil/devil/android/device_utils.py#L2725-L2746
microsoft/botbuilder-python
3d410365461dc434df59bdfeaa2f16d28d9df868
libraries/botbuilder-core/botbuilder/core/message_factory.py
python
MessageFactory.carousel
( attachments: List[Attachment], text: str = None, speak: str = None, input_hint: Union[InputHints, str] = None, )
return attachment_activity( AttachmentLayoutTypes.carousel, attachments, text, speak, input_hint )
Returns a message that will display a set of attachments using a carousel layout. :Example: message = MessageFactory.carousel([CardFactory.hero_card(HeroCard(title='title1', images=[CardImage(url='imageUrl1')], buttons=[CardAction(title='button1')])), CardFactory.hero_card(HeroCard(title='title2', images=[CardImage(url='imageUrl2')], buttons=[CardAction(title='button2')])), CardFactory.hero_card(HeroCard(title='title3', images=[CardImage(url='imageUrl3')], buttons=[CardAction(title='button3')]))]) await context.send_activity(message) :param attachments: :param text: :param speak: :param input_hint: :return:
Returns a message that will display a set of attachments using a carousel layout.
[ "Returns", "a", "message", "that", "will", "display", "a", "set", "of", "attachments", "using", "a", "carousel", "layout", "." ]
def carousel( attachments: List[Attachment], text: str = None, speak: str = None, input_hint: Union[InputHints, str] = None, ) -> Activity: """ Returns a message that will display a set of attachments using a carousel layout. :Example: message = MessageFactory.carousel([CardFactory.hero_card(HeroCard(title='title1', images=[CardImage(url='imageUrl1')], buttons=[CardAction(title='button1')])), CardFactory.hero_card(HeroCard(title='title2', images=[CardImage(url='imageUrl2')], buttons=[CardAction(title='button2')])), CardFactory.hero_card(HeroCard(title='title3', images=[CardImage(url='imageUrl3')], buttons=[CardAction(title='button3')]))]) await context.send_activity(message) :param attachments: :param text: :param speak: :param input_hint: :return: """ return attachment_activity( AttachmentLayoutTypes.carousel, attachments, text, speak, input_hint )
[ "def", "carousel", "(", "attachments", ":", "List", "[", "Attachment", "]", ",", "text", ":", "str", "=", "None", ",", "speak", ":", "str", "=", "None", ",", "input_hint", ":", "Union", "[", "InputHints", ",", "str", "]", "=", "None", ",", ")", "->", "Activity", ":", "return", "attachment_activity", "(", "AttachmentLayoutTypes", ".", "carousel", ",", "attachments", ",", "text", ",", "speak", ",", "input_hint", ")" ]
https://github.com/microsoft/botbuilder-python/blob/3d410365461dc434df59bdfeaa2f16d28d9df868/libraries/botbuilder-core/botbuilder/core/message_factory.py#L160-L189
frescobaldi/frescobaldi
301cc977fc4ba7caa3df9e4bf905212ad5d06912
frescobaldi_app/file_import/__init__.py
python
FileImport.import_musicxml
(self)
Opens a MusicXML file. Converts it to ly by using musicxml2ly.
Opens a MusicXML file. Converts it to ly by using musicxml2ly.
[ "Opens", "a", "MusicXML", "file", ".", "Converts", "it", "to", "ly", "by", "using", "musicxml2ly", "." ]
def import_musicxml(self): """Opens a MusicXML file. Converts it to ly by using musicxml2ly.""" filetypes = '{0} (*.xml *.musicxml *.mxl);;{1} (*)'.format( _("MusicXML Files"), _("All Files")) caption = _("dialog title", "Import a MusicXML file") self.do_import(filetypes, caption)
[ "def", "import_musicxml", "(", "self", ")", ":", "filetypes", "=", "'{0} (*.xml *.musicxml *.mxl);;{1} (*)'", ".", "format", "(", "_", "(", "\"MusicXML Files\"", ")", ",", "_", "(", "\"All Files\"", ")", ")", "caption", "=", "_", "(", "\"dialog title\"", ",", "\"Import a MusicXML file\"", ")", "self", ".", "do_import", "(", "filetypes", ",", "caption", ")" ]
https://github.com/frescobaldi/frescobaldi/blob/301cc977fc4ba7caa3df9e4bf905212ad5d06912/frescobaldi_app/file_import/__init__.py#L129-L134
apache/libcloud
90971e17bfd7b6bb97b2489986472c531cc8e140
libcloud/compute/drivers/vsphere.py
python
VSphereNodeDriver.find_by_uuid
(self, node_uuid)
return vm
Searches VMs for a given uuid and returns a pyVmomi.VmomiSupport.vim.VirtualMachine
Searches VMs for a given uuid and returns a pyVmomi.VmomiSupport.vim.VirtualMachine
[ "Searches", "VMs", "for", "a", "given", "uuid", "and", "returns", "a", "pyVmomi", ".", "VmomiSupport", ".", "vim", ".", "VirtualMachine" ]
def find_by_uuid(self, node_uuid): """Searches VMs for a given uuid and returns a pyVmomi.VmomiSupport.vim.VirtualMachine """ vm = self.connection.content.searchIndex.FindByUuid(None, node_uuid, True, True) if not vm: # perhaps it is a moid vm = self._get_item_by_moid("VirtualMachine", node_uuid) if not vm: raise LibcloudError("Unable to locate VirtualMachine.", driver=self) return vm
[ "def", "find_by_uuid", "(", "self", ",", "node_uuid", ")", ":", "vm", "=", "self", ".", "connection", ".", "content", ".", "searchIndex", ".", "FindByUuid", "(", "None", ",", "node_uuid", ",", "True", ",", "True", ")", "if", "not", "vm", ":", "# perhaps it is a moid", "vm", "=", "self", ".", "_get_item_by_moid", "(", "\"VirtualMachine\"", ",", "node_uuid", ")", "if", "not", "vm", ":", "raise", "LibcloudError", "(", "\"Unable to locate VirtualMachine.\"", ",", "driver", "=", "self", ")", "return", "vm" ]
https://github.com/apache/libcloud/blob/90971e17bfd7b6bb97b2489986472c531cc8e140/libcloud/compute/drivers/vsphere.py#L856-L866
rosedu/wouso
82fd175354432d29b6623c150d5d82c4fe5260de
wouso/interface/activity/models.py
python
Activity.get_player_activity
(cls, player, **kwargs)
return cls.filter_activity(query)
Return a user's activity.
Return a user's activity.
[ "Return", "a", "user", "s", "activity", "." ]
def get_player_activity(cls, player, **kwargs): """ Return a user's activity. """ query = cls.queryset().filter(Q(user_to=player) | Q(user_from=player)).order_by('-timestamp') return cls.filter_activity(query)
[ "def", "get_player_activity", "(", "cls", ",", "player", ",", "*", "*", "kwargs", ")", ":", "query", "=", "cls", ".", "queryset", "(", ")", ".", "filter", "(", "Q", "(", "user_to", "=", "player", ")", "|", "Q", "(", "user_from", "=", "player", ")", ")", ".", "order_by", "(", "'-timestamp'", ")", "return", "cls", ".", "filter_activity", "(", "query", ")" ]
https://github.com/rosedu/wouso/blob/82fd175354432d29b6623c150d5d82c4fe5260de/wouso/interface/activity/models.py#L78-L83
aws-samples/ecs-blue-green-deployment
f319ca8a1e5c90ad48beaa67c4f6ea6fa51f2efb
scripts/deployer.py
python
find_beta_image_identifier
(targetgrouparn)
return identifier,imagesha
Queries the tags on TargetGroups Args: targetgrouparn - Amazon ARN of the Target group that needs to be queried for the Tags Returns: identifier: value of the target group tag whose key is "Identifier" imagesha: SHA or image ID of the image running on the target group Raises: Exception: Any exception thrown by handler
Queries the tags on TargetGroups
[ "Queries", "the", "tags", "on", "TargetGroups" ]
def find_beta_image_identifier(targetgrouparn): """Queries the tags on TargetGroups Args: targetgrouparn - Amazon ARN of the Target group that needs to be queried for the Tags Returns: identifier : tag key value of the target group , with KeyName as "Identifier" sha: Sha or the image id running on target group Raises: Exception: Any exception thrown by handler """ response = elb_client.describe_tags(ResourceArns=[targetgrouparn]) identifier = None imagesha = None for tags in response['TagDescriptions']: for tag in tags['Tags']: if tag['Key'] == "Identifier": print("Image identifier string on " + targetgrouparn + " : " + tag['Value']) identifier = tag['Value'] if tag['Key'] == "Image": imagesha = tag['Value'] return identifier,imagesha
[ "def", "find_beta_image_identifier", "(", "targetgrouparn", ")", ":", "response", "=", "elb_client", ".", "describe_tags", "(", "ResourceArns", "=", "[", "targetgrouparn", "]", ")", "identifier", "=", "None", "imagesha", "=", "None", "for", "tags", "in", "response", "[", "'TagDescriptions'", "]", ":", "for", "tag", "in", "tags", "[", "'Tags'", "]", ":", "if", "tag", "[", "'Key'", "]", "==", "\"Identifier\"", ":", "print", "(", "\"Image identifier string on \"", "+", "targetgrouparn", "+", "\" : \"", "+", "tag", "[", "'Value'", "]", ")", "identifier", "=", "tag", "[", "'Value'", "]", "if", "tag", "[", "'Key'", "]", "==", "\"Image\"", ":", "imagesha", "=", "tag", "[", "'Value'", "]", "return", "identifier", ",", "imagesha" ]
https://github.com/aws-samples/ecs-blue-green-deployment/blob/f319ca8a1e5c90ad48beaa67c4f6ea6fa51f2efb/scripts/deployer.py#L108-L131
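A minimal usage sketch for the tag query above, assuming boto3 credentials are already configured; it supplies the module-level elb_client the function reads, and the target group ARN is a hypothetical placeholder:

import boto3

elb_client = boto3.client('elbv2')  # module-level client the function expects
# hypothetical ARN; substitute a target group from your account
arn = 'arn:aws:elasticloadbalancing:us-east-1:123456789012:targetgroup/beta/0123456789abcdef'
identifier, imagesha = find_beta_image_identifier(arn)
print(identifier, imagesha)  # both None if neither tag is set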
nodesign/weio
1d67d705a5c36a2e825ad13feab910b0aca9a2e8
things/input/environmental/digitemp/device/termometer.py
python
OneWireTemperatureSensor._reset
(self)
Send reset pulse, wait for presence and then select the device.
Send reset pulse, wait for presence and then select the device.
[ "Send", "reset", "pulse", "wait", "for", "presence", "and", "then", "select", "the", "device", "." ]
def _reset(self): """ Send reset pulse, wait for presence and then select the device. """ if self.single_mode: self._skip_ROM() # because it is single device else: self._match_ROM(self.rom_code)
[ "def", "_reset", "(", "self", ")", ":", "if", "self", ".", "single_mode", ":", "self", ".", "_skip_ROM", "(", ")", "# because it is single device", "else", ":", "self", ".", "_match_ROM", "(", "self", ".", "rom_code", ")" ]
https://github.com/nodesign/weio/blob/1d67d705a5c36a2e825ad13feab910b0aca9a2e8/things/input/environmental/digitemp/device/termometer.py#L160-L167
w3h/isf
6faf0a3df185465ec17369c90ccc16e2a03a1870
lib/thirdparty/scapy/contrib/gsm_um.py
python
activateAaPdpContextReject
(ProtocolConfigurationOptions_presence=0)
return packet
ACTIVATE AA PDP CONTEXT REJECT Section 9.5.12
ACTIVATE AA PDP CONTEXT REJECT Section 9.5.12
[ "ACTIVATE", "AA", "PDP", "CONTEXT", "REJECT", "Section", "9", ".", "5", ".", "12" ]
def activateAaPdpContextReject(ProtocolConfigurationOptions_presence=0): """ACTIVATE AA PDP CONTEXT REJECT Section 9.5.12""" a = TpPd(pd=0x8) b = MessageType(mesType=0x52) # 01010010 c = SmCause() packet = a / b / c if ProtocolConfigurationOptions_presence == 1: d = ProtocolConfigurationOptions(ieiPCO=0x27) packet = packet / d return packet
[ "def", "activateAaPdpContextReject", "(", "ProtocolConfigurationOptions_presence", "=", "0", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "0x8", ")", "b", "=", "MessageType", "(", "mesType", "=", "0x52", ")", "# 01010010", "c", "=", "SmCause", "(", ")", "packet", "=", "a", "/", "b", "/", "c", "if", "ProtocolConfigurationOptions_presence", "==", "1", ":", "d", "=", "ProtocolConfigurationOptions", "(", "ieiPCO", "=", "0x27", ")", "packet", "=", "packet", "/", "d", "return", "packet" ]
https://github.com/w3h/isf/blob/6faf0a3df185465ec17369c90ccc16e2a03a1870/lib/thirdparty/scapy/contrib/gsm_um.py#L2777-L2786
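A quick usage sketch, assuming the gsm_um contrib module (and its TpPd, MessageType, SmCause and ProtocolConfigurationOptions layer classes) has been loaded:

pkt = activateAaPdpContextReject()  # TpPd / MessageType / SmCause
pkt_pco = activateAaPdpContextReject(ProtocolConfigurationOptions_presence=1)  # appends the optional PCO IE
pkt_pco.show()  # print the layer stack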
openimages/dataset
077282972acd0ad8628f1526760ad239a38a8a97
tools/compute_bottleneck.py
python
main
(args)
[]
def main(args): if not os.path.exists(FLAGS.checkpoint): tf.logging.fatal( 'Checkpoint %s does not exist. Have you downloaded it? See tools/download_data.sh', FLAGS.checkpoint) g = tf.Graph() with g.as_default(): input_image = PreprocessImage(FLAGS.image_path[0]) with slim.arg_scope(inception.inception_v3_arg_scope()): logits, end_points = inception.inception_v3( input_image, num_classes=FLAGS.num_classes, is_training=False) bottleneck = end_points['PreLogits'] init_op = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer(), tf.tables_initializer()) saver = tf_saver.Saver() sess = tf.Session() saver.restore(sess, FLAGS.checkpoint) # Run the evaluation on the image bottleneck_eval = np.squeeze(sess.run(bottleneck)) first = True for val in bottleneck_eval: if not first: sys.stdout.write(",") first = False sys.stdout.write('{:.3f}'.format(val)) sys.stdout.write('\n')
[ "def", "main", "(", "args", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "FLAGS", ".", "checkpoint", ")", ":", "tf", ".", "logging", ".", "fatal", "(", "'Checkpoint %s does not exist. Have you downloaded it? See tools/download_data.sh'", ",", "FLAGS", ".", "checkpoint", ")", "g", "=", "tf", ".", "Graph", "(", ")", "with", "g", ".", "as_default", "(", ")", ":", "input_image", "=", "PreprocessImage", "(", "FLAGS", ".", "image_path", "[", "0", "]", ")", "with", "slim", ".", "arg_scope", "(", "inception", ".", "inception_v3_arg_scope", "(", ")", ")", ":", "logits", ",", "end_points", "=", "inception", ".", "inception_v3", "(", "input_image", ",", "num_classes", "=", "FLAGS", ".", "num_classes", ",", "is_training", "=", "False", ")", "bottleneck", "=", "end_points", "[", "'PreLogits'", "]", "init_op", "=", "tf", ".", "group", "(", "tf", ".", "global_variables_initializer", "(", ")", ",", "tf", ".", "local_variables_initializer", "(", ")", ",", "tf", ".", "tables_initializer", "(", ")", ")", "saver", "=", "tf_saver", ".", "Saver", "(", ")", "sess", "=", "tf", ".", "Session", "(", ")", "saver", ".", "restore", "(", "sess", ",", "FLAGS", ".", "checkpoint", ")", "# Run the evaluation on the image", "bottleneck_eval", "=", "np", ".", "squeeze", "(", "sess", ".", "run", "(", "bottleneck", ")", ")", "first", "=", "True", "for", "val", "in", "bottleneck_eval", ":", "if", "not", "first", ":", "sys", ".", "stdout", ".", "write", "(", "\",\"", ")", "first", "=", "False", "sys", ".", "stdout", ".", "write", "(", "'{:.3f}'", ".", "format", "(", "val", ")", ")", "sys", ".", "stdout", ".", "write", "(", "'\\n'", ")" ]
https://github.com/openimages/dataset/blob/077282972acd0ad8628f1526760ad239a38a8a97/tools/compute_bottleneck.py#L76-L106
lovelylain/pyctp
fd304de4b50c4ddc31a4190b1caaeb5dec66bc5d
lts/ctp/__init__.py
python
TraderApi.ReqQryTradingAccount
(self, pQryTradingAccount, nRequestID)
return 0
请求查询资金账户
请求查询资金账户
[ "请求查询资金账户" ]
def ReqQryTradingAccount(self, pQryTradingAccount, nRequestID): """请求查询资金账户""" return 0
[ "def", "ReqQryTradingAccount", "(", "self", ",", "pQryTradingAccount", ",", "nRequestID", ")", ":", "return", "0" ]
https://github.com/lovelylain/pyctp/blob/fd304de4b50c4ddc31a4190b1caaeb5dec66bc5d/lts/ctp/__init__.py#L208-L210
ask/carrot
5889a25cd2e274642071c9bba39772f4b3e3d9da
carrot/connection.py
python
BrokerConnection.info
(self)
return {"hostname": self.hostname, "userid": self.userid, "password": self.password, "virtual_host": self.virtual_host, "port": port, "insist": self.insist, "ssl": self.ssl, "transport_cls": backend_cls, "backend_cls": backend_cls, "connect_timeout": self.connect_timeout}
Get connection info.
Get connection info.
[ "Get", "connection", "info", "." ]
def info(self): """Get connection info.""" backend_cls = self.backend_cls or "amqplib" port = self.port or self.create_backend().default_port return {"hostname": self.hostname, "userid": self.userid, "password": self.password, "virtual_host": self.virtual_host, "port": port, "insist": self.insist, "ssl": self.ssl, "transport_cls": backend_cls, "backend_cls": backend_cls, "connect_timeout": self.connect_timeout}
[ "def", "info", "(", "self", ")", ":", "backend_cls", "=", "self", ".", "backend_cls", "or", "\"amqplib\"", "port", "=", "self", ".", "port", "or", "self", ".", "create_backend", "(", ")", ".", "default_port", "return", "{", "\"hostname\"", ":", "self", ".", "hostname", ",", "\"userid\"", ":", "self", ".", "userid", ",", "\"password\"", ":", "self", ".", "password", ",", "\"virtual_host\"", ":", "self", ".", "virtual_host", ",", "\"port\"", ":", "port", ",", "\"insist\"", ":", "self", ".", "insist", ",", "\"ssl\"", ":", "self", ".", "ssl", ",", "\"transport_cls\"", ":", "backend_cls", ",", "\"backend_cls\"", ":", "backend_cls", ",", "\"connect_timeout\"", ":", "self", ".", "connect_timeout", "}" ]
https://github.com/ask/carrot/blob/5889a25cd2e274642071c9bba39772f4b3e3d9da/carrot/connection.py#L223-L236
gramps-project/gramps
04d4651a43eb210192f40a9f8c2bad8ee8fa3753
gramps/gui/views/treemodels/treebasemodel.py
python
TreeBaseModel.__rebuild_filter
(self, dfilter, skip, items, gen_cursor, data_map, add_func)
Rebuild the data map for a single Gramps object type, where a filter is applied.
Rebuild the data map for a single Gramps object type, where a filter is applied.
[ "Rebuild", "the", "data", "map", "for", "a", "single", "Gramps", "object", "type", "where", "a", "filter", "is", "applied", "." ]
def __rebuild_filter(self, dfilter, skip, items, gen_cursor, data_map, add_func): """ Rebuild the data map for a single Gramps object type, where a filter is applied. """ pmon = progressdlg.ProgressMonitor( progressdlg.StatusProgress, (self.uistate,), popup_time=2, title=_("Loading items...")) status_ppl = progressdlg.LongOpStatus(total_steps=items, interval=items // 20) pmon.add_op(status_ppl) self.__total += items assert not skip if dfilter: cdb = CacheProxyDb(self.db) for handle in dfilter.apply(cdb, tree=True, user=User(parent=self.uistate.window, uistate=self.uistate)): status_ppl.heartbeat() data = data_map(handle) add_func(handle, data) self.__displayed += 1 else: with gen_cursor() as cursor: for handle, data in cursor: status_ppl.heartbeat() add_func(handle, data) self.__displayed += 1 status_ppl.end()
[ "def", "__rebuild_filter", "(", "self", ",", "dfilter", ",", "skip", ",", "items", ",", "gen_cursor", ",", "data_map", ",", "add_func", ")", ":", "pmon", "=", "progressdlg", ".", "ProgressMonitor", "(", "progressdlg", ".", "StatusProgress", ",", "(", "self", ".", "uistate", ",", ")", ",", "popup_time", "=", "2", ",", "title", "=", "_", "(", "\"Loading items...\"", ")", ")", "status_ppl", "=", "progressdlg", ".", "LongOpStatus", "(", "total_steps", "=", "items", ",", "interval", "=", "items", "//", "20", ")", "pmon", ".", "add_op", "(", "status_ppl", ")", "self", ".", "__total", "+=", "items", "assert", "not", "skip", "if", "dfilter", ":", "cdb", "=", "CacheProxyDb", "(", "self", ".", "db", ")", "for", "handle", "in", "dfilter", ".", "apply", "(", "cdb", ",", "tree", "=", "True", ",", "user", "=", "User", "(", "parent", "=", "self", ".", "uistate", ".", "window", ",", "uistate", "=", "self", ".", "uistate", ")", ")", ":", "status_ppl", ".", "heartbeat", "(", ")", "data", "=", "data_map", "(", "handle", ")", "add_func", "(", "handle", ",", "data", ")", "self", ".", "__displayed", "+=", "1", "else", ":", "with", "gen_cursor", "(", ")", "as", "cursor", ":", "for", "handle", ",", "data", "in", "cursor", ":", "status_ppl", ".", "heartbeat", "(", ")", "add_func", "(", "handle", ",", "data", ")", "self", ".", "__displayed", "+=", "1", "status_ppl", ".", "end", "(", ")" ]
https://github.com/gramps-project/gramps/blob/04d4651a43eb210192f40a9f8c2bad8ee8fa3753/gramps/gui/views/treemodels/treebasemodel.py#L572-L603
securesystemslab/zippy
ff0e84ac99442c2c55fe1d285332cfd4e185e089
zippy/benchmarks/src/benchmarks/sympy/sympy/galgebra/ncutil.py
python
get_commutative_coef
(expr)
return S.One
[]
def get_commutative_coef(expr): if isinstance(expr, Mul): (coefs, bases) = expr.args_cnc() return Mul(*coefs) return S.One
[ "def", "get_commutative_coef", "(", "expr", ")", ":", "if", "isinstance", "(", "expr", ",", "Mul", ")", ":", "(", "coefs", ",", "bases", ")", "=", "expr", ".", "args_cnc", "(", ")", "return", "Mul", "(", "*", "coefs", ")", "return", "S", ".", "One" ]
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/galgebra/ncutil.py#L24-L28
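A small worked example, assuming sympy is installed; noncommutative symbols make args_cnc() actually split the product:

from sympy import Symbol, symbols

x = Symbol('x')
A, B = symbols('A B', commutative=False)
print(get_commutative_coef(2 * x * A * B))  # 2*x: only the commutative factors
print(get_commutative_coef(A))              # 1: not a Mul, so S.One is returned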
IJDykeman/wangTiles
7c1ee2095ebdf7f72bce07d94c6484915d5cae8b
experimental_code/tiles_3d/venv/lib/python2.7/site-packages/pip/_vendor/distlib/locators.py
python
Page.__init__
(self, data, url)
Initialise an instance with the Unicode page contents and the URL they came from.
Initialise an instance with the Unicode page contents and the URL they came from.
[ "Initialise", "an", "instance", "with", "the", "Unicode", "page", "contents", "and", "the", "URL", "they", "came", "from", "." ]
def __init__(self, data, url): """ Initialise an instance with the Unicode page contents and the URL they came from. """ self.data = data self.base_url = self.url = url m = self._base.search(self.data) if m: self.base_url = m.group(1)
[ "def", "__init__", "(", "self", ",", "data", ",", "url", ")", ":", "self", ".", "data", "=", "data", "self", ".", "base_url", "=", "self", ".", "url", "=", "url", "m", "=", "self", ".", "_base", ".", "search", "(", "self", ".", "data", ")", "if", "m", ":", "self", ".", "base_url", "=", "m", ".", "group", "(", "1", ")" ]
https://github.com/IJDykeman/wangTiles/blob/7c1ee2095ebdf7f72bce07d94c6484915d5cae8b/experimental_code/tiles_3d/venv/lib/python2.7/site-packages/pip/_vendor/distlib/locators.py#L461-L470
GNS3/gns3-gui
da8adbaa18ab60e053af2a619efd468f4c8950f3
gns3/base_node.py
python
BaseNode.reset
(cls)
Reset the instance count.
Reset the instance count.
[ "Reset", "the", "instance", "count", "." ]
def reset(cls): """ Reset the instance count. """ cls._instance_count = 1
[ "def", "reset", "(", "cls", ")", ":", "cls", ".", "_instance_count", "=", "1" ]
https://github.com/GNS3/gns3-gui/blob/da8adbaa18ab60e053af2a619efd468f4c8950f3/gns3/base_node.py#L125-L130
python-trio/trio
4edfd41bd5519a2e626e87f6c6ca9fb32b90a6f4
trio/_core/_traps.py
python
cancel_shielded_checkpoint
()
return (await _async_yield(CancelShieldedCheckpoint)).unwrap()
Introduce a schedule point, but not a cancel point. This is *not* a :ref:`checkpoint <checkpoints>`, but it is half of a checkpoint, and when combined with :func:`checkpoint_if_cancelled` it can make a full checkpoint. Equivalent to (but potentially more efficient than):: with trio.CancelScope(shield=True): await trio.lowlevel.checkpoint()
Introduce a schedule point, but not a cancel point.
[ "Introduce", "a", "schedule", "point", "but", "not", "a", "cancel", "point", "." ]
async def cancel_shielded_checkpoint(): """Introduce a schedule point, but not a cancel point. This is *not* a :ref:`checkpoint <checkpoints>`, but it is half of a checkpoint, and when combined with :func:`checkpoint_if_cancelled` it can make a full checkpoint. Equivalent to (but potentially more efficient than):: with trio.CancelScope(shield=True): await trio.lowlevel.checkpoint() """ return (await _async_yield(CancelShieldedCheckpoint)).unwrap()
[ "async", "def", "cancel_shielded_checkpoint", "(", ")", ":", "return", "(", "await", "_async_yield", "(", "CancelShieldedCheckpoint", ")", ")", ".", "unwrap", "(", ")" ]
https://github.com/python-trio/trio/blob/4edfd41bd5519a2e626e87f6c6ca9fb32b90a6f4/trio/_core/_traps.py#L30-L43
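A sketch of the intended use, with a hypothetical do_chunk_of_work() standing in for real computation: the loop yields to trio's scheduler regularly without ever opening a cancellation window:

import trio

async def worker():
    while True:
        do_chunk_of_work()  # hypothetical synchronous step
        # reschedule cooperatively, but never raise Cancelled here
        await trio.lowlevel.cancel_shielded_checkpoint()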
thaines/helit
04bd36ee0fb6b762c63d746e2cd8813641dceda9
video/video_node.py
python
VideoNode.frameCount
(self)
Returns the number of times you can call nextFrame before it starts returning None.
Returns the number of times you can call nextFrame before it starts returning None.
[ "Returns", "the", "number", "of", "times", "you", "can", "call", "nextFrame", "before", "it", "starts", "returning", "None", "." ]
def frameCount(self): """Returns the number of times you can call nextFrame before it starts returning None.""" raise Exception('frameCount not implemented')
[ "def", "frameCount", "(", "self", ")", ":", "raise", "Exception", "(", "'frameCount not implemented'", ")" ]
https://github.com/thaines/helit/blob/04bd36ee0fb6b762c63d746e2cd8813641dceda9/video/video_node.py#L46-L48
mrlesmithjr/Ansible
d44f0dc0d942bdf3bf7334b307e6048f0ee16e36
roles/ansible-vsphere-management/scripts/pdns/lib/python2.7/site-packages/pip/index.py
python
HTMLPage.links
(self)
Yields all links in the page
Yields all links in the page
[ "Yields", "all", "links", "in", "the", "page" ]
def links(self): """Yields all links in the page""" for anchor in self.parsed.findall(".//a"): if anchor.get("href"): href = anchor.get("href") url = self.clean_link( urllib_parse.urljoin(self.base_url, href) ) pyrequire = anchor.get('data-requires-python') pyrequire = unescape(pyrequire) if pyrequire else None yield Link(url, self, requires_python=pyrequire)
[ "def", "links", "(", "self", ")", ":", "for", "anchor", "in", "self", ".", "parsed", ".", "findall", "(", "\".//a\"", ")", ":", "if", "anchor", ".", "get", "(", "\"href\"", ")", ":", "href", "=", "anchor", ".", "get", "(", "\"href\"", ")", "url", "=", "self", ".", "clean_link", "(", "urllib_parse", ".", "urljoin", "(", "self", ".", "base_url", ",", "href", ")", ")", "pyrequire", "=", "anchor", ".", "get", "(", "'data-requires-python'", ")", "pyrequire", "=", "unescape", "(", "pyrequire", ")", "if", "pyrequire", "else", "None", "yield", "Link", "(", "url", ",", "self", ",", "requires_python", "=", "pyrequire", ")" ]
https://github.com/mrlesmithjr/Ansible/blob/d44f0dc0d942bdf3bf7334b307e6048f0ee16e36/roles/ansible-vsphere-management/scripts/pdns/lib/python2.7/site-packages/pip/index.py#L858-L868
karanchahal/distiller
a17ec06cbeafcdd2aea19d7c7663033c951392f5
models/cifar10/shufflenet.py
python
ShuffleNetG3
()
return ShuffleNet(cfg)
[]
def ShuffleNetG3(): cfg = { 'out_planes': [240,480,960], 'num_blocks': [4,8,4], 'groups': 3 } return ShuffleNet(cfg)
[ "def", "ShuffleNetG3", "(", ")", ":", "cfg", "=", "{", "'out_planes'", ":", "[", "240", ",", "480", ",", "960", "]", ",", "'num_blocks'", ":", "[", "4", ",", "8", ",", "4", "]", ",", "'groups'", ":", "3", "}", "return", "ShuffleNet", "(", "cfg", ")" ]
https://github.com/karanchahal/distiller/blob/a17ec06cbeafcdd2aea19d7c7663033c951392f5/models/cifar10/shufflenet.py#L94-L100
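A usage sketch, assuming torch and the ShuffleNet class defined earlier in the same module; the CIFAR-10 input size and the 10-class head are assumptions based on the file's location under models/cifar10:

import torch

net = ShuffleNetG3()
x = torch.randn(1, 3, 32, 32)  # one CIFAR-10 sized image
y = net(x)
print(y.shape)                 # expected torch.Size([1, 10])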
Azure/azure-devops-cli-extension
11334cd55806bef0b99c3bee5a438eed71e44037
azure-devops/azext_devops/devops_sdk/released/build/build_client.py
python
BuildClient.add_build_tags
(self, tags, project, build_id)
return self._deserialize('[str]', self._unwrap_collection(response))
AddBuildTags. Adds tags to a build. :param [str] tags: The tags to add. :param str project: Project ID or project name :param int build_id: The ID of the build. :rtype: [str]
AddBuildTags. Adds tags to a build. :param [str] tags: The tags to add. :param str project: Project ID or project name :param int build_id: The ID of the build. :rtype: [str]
[ "AddBuildTags", ".", "Adds", "tags", "to", "a", "build", ".", ":", "param", "[", "str", "]", "tags", ":", "The", "tags", "to", "add", ".", ":", "param", "str", "project", ":", "Project", "ID", "or", "project", "name", ":", "param", "int", "build_id", ":", "The", "ID", "of", "the", "build", ".", ":", "rtype", ":", "[", "str", "]" ]
def add_build_tags(self, tags, project, build_id): """AddBuildTags. Adds tags to a build. :param [str] tags: The tags to add. :param str project: Project ID or project name :param int build_id: The ID of the build. :rtype: [str] """ route_values = {} if project is not None: route_values['project'] = self._serialize.url('project', project, 'str') if build_id is not None: route_values['buildId'] = self._serialize.url('build_id', build_id, 'int') content = self._serialize.body(tags, '[str]') response = self._send(http_method='POST', location_id='6e6114b2-8161-44c8-8f6c-c5505782427f', version='5.1', route_values=route_values, content=content) return self._deserialize('[str]', self._unwrap_collection(response))
[ "def", "add_build_tags", "(", "self", ",", "tags", ",", "project", ",", "build_id", ")", ":", "route_values", "=", "{", "}", "if", "project", "is", "not", "None", ":", "route_values", "[", "'project'", "]", "=", "self", ".", "_serialize", ".", "url", "(", "'project'", ",", "project", ",", "'str'", ")", "if", "build_id", "is", "not", "None", ":", "route_values", "[", "'buildId'", "]", "=", "self", ".", "_serialize", ".", "url", "(", "'build_id'", ",", "build_id", ",", "'int'", ")", "content", "=", "self", ".", "_serialize", ".", "body", "(", "tags", ",", "'[str]'", ")", "response", "=", "self", ".", "_send", "(", "http_method", "=", "'POST'", ",", "location_id", "=", "'6e6114b2-8161-44c8-8f6c-c5505782427f'", ",", "version", "=", "'5.1'", ",", "route_values", "=", "route_values", ",", "content", "=", "content", ")", "return", "self", ".", "_deserialize", "(", "'[str]'", ",", "self", ".", "_unwrap_collection", "(", "response", ")", ")" ]
https://github.com/Azure/azure-devops-cli-extension/blob/11334cd55806bef0b99c3bee5a438eed71e44037/azure-devops/azext_devops/devops_sdk/released/build/build_client.py#L868-L887
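A hedged usage sketch with the azure-devops Python SDK; the organization URL, PAT, project name and build id below are placeholders:

from azure.devops.connection import Connection
from msrest.authentication import BasicAuthentication

credentials = BasicAuthentication('', 'my-personal-access-token')  # placeholder PAT
connection = Connection(base_url='https://dev.azure.com/my-org', creds=credentials)
build_client = connection.clients.get_build_client()
tags = build_client.add_build_tags(['nightly', 'smoke-tested'], 'MyProject', 42)
print(tags)  # full tag list for build 42 after the POST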
open-io/oio-sds
16041950b6056a55d5ce7ca77795defe6dfa6c61
oio/directory/client.py
python
DirectoryClient.unlink
(self, account=None, reference=None, service_type=None, cid=None, **kwargs)
Remove an associated service from the reference
Remove an associated service from the reference
[ "Remove", "an", "associated", "service", "from", "the", "reference" ]
def unlink(self, account=None, reference=None, service_type=None, cid=None, **kwargs): """ Remove an associated service from the reference """ params = self._make_params(account, reference, service_type, cid=cid) _resp, _body = self._request('POST', '/unlink', params=params, **kwargs)
[ "def", "unlink", "(", "self", ",", "account", "=", "None", ",", "reference", "=", "None", ",", "service_type", "=", "None", ",", "cid", "=", "None", ",", "*", "*", "kwargs", ")", ":", "params", "=", "self", ".", "_make_params", "(", "account", ",", "reference", ",", "service_type", ",", "cid", "=", "cid", ")", "_resp", ",", "_body", "=", "self", ".", "_request", "(", "'POST'", ",", "'/unlink'", ",", "params", "=", "params", ",", "*", "*", "kwargs", ")" ]
https://github.com/open-io/oio-sds/blob/16041950b6056a55d5ce7ca77795defe6dfa6c61/oio/directory/client.py#L147-L154
exaile/exaile
a7b58996c5c15b3aa7b9975ac13ee8f784ef4689
xlgui/preferences/widgets.py
python
CheckConditional.on_check_condition
(self)
return self.get_condition_value()
Specifies the condition to meet :returns: Whether the condition is met or not :rtype: bool
Specifies the condition to meet
[ "Specifies", "the", "condition", "to", "meet" ]
def on_check_condition(self): """ Specifies the condition to meet :returns: Whether the condition is met or not :rtype: bool """ return self.get_condition_value()
[ "def", "on_check_condition", "(", "self", ")", ":", "return", "self", ".", "get_condition_value", "(", ")" ]
https://github.com/exaile/exaile/blob/a7b58996c5c15b3aa7b9975ac13ee8f784ef4689/xlgui/preferences/widgets.py#L237-L244
googleads/google-ads-python
2a1d6062221f6aad1992a6bcca0e7e4a93d2db86
google/ads/googleads/v9/services/services/asset_field_type_view_service/client.py
python
AssetFieldTypeViewServiceClient.from_service_account_file
(cls, filename: str, *args, **kwargs)
return cls(*args, **kwargs)
Creates an instance of this client using the provided credentials file. Args: filename (str): The path to the service account private key json file. args: Additional arguments to pass to the constructor. kwargs: Additional arguments to pass to the constructor. Returns: AssetFieldTypeViewServiceClient: The constructed client.
Creates an instance of this client using the provided credentials file.
[ "Creates", "an", "instance", "of", "this", "client", "using", "the", "provided", "credentials", "file", "." ]
def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials file. Args: filename (str): The path to the service account private key json file. args: Additional arguments to pass to the constructor. kwargs: Additional arguments to pass to the constructor. Returns: AssetFieldTypeViewServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file( filename ) kwargs["credentials"] = credentials return cls(*args, **kwargs)
[ "def", "from_service_account_file", "(", "cls", ",", "filename", ":", "str", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "credentials", "=", "service_account", ".", "Credentials", ".", "from_service_account_file", "(", "filename", ")", "kwargs", "[", "\"credentials\"", "]", "=", "credentials", "return", "cls", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/googleads/google-ads-python/blob/2a1d6062221f6aad1992a6bcca0e7e4a93d2db86/google/ads/googleads/v9/services/services/asset_field_type_view_service/client.py#L136-L153
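A one-line usage sketch; 'service-account.json' is a hypothetical key path, and real requests still need the usual Google Ads configuration on top of the credentials:

client = AssetFieldTypeViewServiceClient.from_service_account_file('service-account.json')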
okpy/ok
50a00190f05363d096478dd8e53aa1a36dd40c4a
migrations/env.py
python
run_migrations_offline
()
Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output.
Run migrations in 'offline' mode.
[ "Run", "migrations", "in", "offline", "mode", "." ]
def run_migrations_offline(): """Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output. """ url = config.get_main_option("sqlalchemy.url") context.configure(url=url) with context.begin_transaction(): context.run_migrations()
[ "def", "run_migrations_offline", "(", ")", ":", "url", "=", "config", ".", "get_main_option", "(", "\"sqlalchemy.url\"", ")", "context", ".", "configure", "(", "url", "=", "url", ")", "with", "context", ".", "begin_transaction", "(", ")", ":", "context", ".", "run_migrations", "(", ")" ]
https://github.com/okpy/ok/blob/50a00190f05363d096478dd8e53aa1a36dd40c4a/migrations/env.py#L34-L50
Delta-ML/delta
31dfebc8f20b7cb282b62f291ff25a87e403cc86
delta/data/feat/speech_feature.py
python
extract_fbank
(samples, sr=8000, winlen=0.025, winstep=0.01, feature_size=40)
return np.stack([feat, delta, _delta_delta], axis=-1)
extract logfbank with delta and delta-delta Return: ndarray of shape [nframe, feature_size, 3]
extract logfbank with delta and delta-delta Return: ndarray of shape [nframe, feature_size, 3]
[ "extract", "logfbank", "with", "delta", "and", "delta", "-", "delta", "Return", ":", "ndarray", "of", "shape", "[", "nframe", "feature_size", "3", "]" ]
def extract_fbank(samples, sr=8000, winlen=0.025, winstep=0.01, feature_size=40): ''' extract logfbank with delta and delta-delta Return: ndarray of shape [nframe, feature_size, 3] ''' feat = psf.logfbank( samples, nfilt=feature_size, samplerate=sr, winlen=winlen, winstep=winstep, lowfreq=0, highfreq=None, preemph=0.97) delta = psf.delta(feat, N=2) _delta_delta = psf.delta(delta, N=2) return np.stack([feat, delta, _delta_delta], axis=-1)
[ "def", "extract_fbank", "(", "samples", ",", "sr", "=", "8000", ",", "winlen", "=", "0.025", ",", "winstep", "=", "0.01", ",", "feature_size", "=", "40", ")", ":", "feat", "=", "psf", ".", "logfbank", "(", "samples", ",", "nfilt", "=", "feature_size", ",", "samplerate", "=", "sr", ",", "winlen", "=", "winlen", ",", "winstep", "=", "winstep", ",", "lowfreq", "=", "0", ",", "highfreq", "=", "None", ",", "preemph", "=", "0.97", ")", "delta", "=", "psf", ".", "delta", "(", "feat", ",", "N", "=", "2", ")", "_delta_delta", "=", "psf", ".", "delta", "(", "delta", ",", "N", "=", "2", ")", "return", "np", ".", "stack", "(", "[", "feat", ",", "delta", ",", "_delta_delta", "]", ",", "axis", "=", "-", "1", ")" ]
https://github.com/Delta-ML/delta/blob/31dfebc8f20b7cb282b62f291ff25a87e403cc86/delta/data/feat/speech_feature.py#L196-L216
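A shape check on synthetic audio, assuming python_speech_features is importable as psf in the module, as the function requires:

import numpy as np

samples = np.random.randn(8000)        # one second of fake 8 kHz audio
feat = extract_fbank(samples, sr=8000)
print(feat.shape)                      # (nframe, 40, 3): static, delta, delta-delta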
john-hewitt/structural-probes
4c2e265d6bd071e6ab380fd9806e4c6a128b5e97
structural-probes/probe.py
python
OneWordPSDProbe.forward
(self, batch)
return norms
Computes all n depths after projection for each sentence in a batch. Computes (Bh_i)^T(Bh_i) for all i Args: batch: a batch of word representations of the shape (batch_size, max_seq_len, representation_dim) Returns: A tensor of depths of shape (batch_size, max_seq_len)
Computes all n depths after projection for each sentence in a batch.
[ "Computes", "all", "n", "depths", "after", "projection", "for", "each", "sentence", "in", "a", "batch", "." ]
def forward(self, batch): """ Computes all n depths after projection for each sentence in a batch. Computes (Bh_i)^T(Bh_i) for all i Args: batch: a batch of word representations of the shape (batch_size, max_seq_len, representation_dim) Returns: A tensor of depths of shape (batch_size, max_seq_len) """ transformed = torch.matmul(batch, self.proj) batchlen, seqlen, rank = transformed.size() norms = torch.bmm(transformed.view(batchlen* seqlen, 1, rank), transformed.view(batchlen* seqlen, rank, 1)) norms = norms.view(batchlen, seqlen) return norms
[ "def", "forward", "(", "self", ",", "batch", ")", ":", "transformed", "=", "torch", ".", "matmul", "(", "batch", ",", "self", ".", "proj", ")", "batchlen", ",", "seqlen", ",", "rank", "=", "transformed", ".", "size", "(", ")", "norms", "=", "torch", ".", "bmm", "(", "transformed", ".", "view", "(", "batchlen", "*", "seqlen", ",", "1", ",", "rank", ")", ",", "transformed", ".", "view", "(", "batchlen", "*", "seqlen", ",", "rank", ",", "1", ")", ")", "norms", "=", "norms", ".", "view", "(", "batchlen", ",", "seqlen", ")", "return", "norms" ]
https://github.com/john-hewitt/structural-probes/blob/4c2e265d6bd071e6ab380fd9806e4c6a128b5e97/structural-probes/probe.py#L63-L80
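A standalone sketch of the same squared-norm computation, assuming torch; proj is random here rather than the probe's trained B matrix:

import torch

batchlen, seqlen, dim, rank = 2, 5, 8, 4
batch = torch.randn(batchlen, seqlen, dim)
proj = torch.randn(dim, rank)               # stand-in for B
transformed = torch.matmul(batch, proj)     # (2, 5, 4)
flat = transformed.view(batchlen * seqlen, 1, rank)
norms = torch.bmm(flat, flat.transpose(1, 2)).view(batchlen, seqlen)
# equals the per-token squared L2 norm ||B h_i||^2
assert torch.allclose(norms, (transformed ** 2).sum(-1))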
chribsen/simple-machine-learning-examples
dc94e52a4cebdc8bb959ff88b81ff8cfeca25022
venv/lib/python2.7/site-packages/pandas/stats/ols.py
python
OLS.rmse
(self)
return self._rmse_raw
Returns the rmse value.
Returns the rmse value.
[ "Returns", "the", "rmse", "value", "." ]
def rmse(self): """Returns the rmse value.""" return self._rmse_raw
[ "def", "rmse", "(", "self", ")", ":", "return", "self", ".", "_rmse_raw" ]
https://github.com/chribsen/simple-machine-learning-examples/blob/dc94e52a4cebdc8bb959ff88b81ff8cfeca25022/venv/lib/python2.7/site-packages/pandas/stats/ols.py#L329-L331
kylebebak/Requester
4a9f9f051fa5fc951a8f7ad098a328261ca2db97
deps/oauthlib/oauth2/rfc6749/utils.py
python
list_to_scope
(scope)
Convert a list of scopes to a space separated string.
Convert a list of scopes to a space separated string.
[ "Convert", "a", "list", "of", "scopes", "to", "a", "space", "separated", "string", "." ]
def list_to_scope(scope): """Convert a list of scopes to a space separated string.""" if isinstance(scope, unicode_type) or scope is None: return scope elif isinstance(scope, (set, tuple, list)): return " ".join([unicode_type(s) for s in scope]) else: raise ValueError("Invalid scope (%s), must be string, tuple, set, or list." % scope)
[ "def", "list_to_scope", "(", "scope", ")", ":", "if", "isinstance", "(", "scope", ",", "unicode_type", ")", "or", "scope", "is", "None", ":", "return", "scope", "elif", "isinstance", "(", "scope", ",", "(", "set", ",", "tuple", ",", "list", ")", ")", ":", "return", "\" \"", ".", "join", "(", "[", "unicode_type", "(", "s", ")", "for", "s", "in", "scope", "]", ")", "else", ":", "raise", "ValueError", "(", "\"Invalid scope (%s), must be string, tuple, set, or list.\"", "%", "scope", ")" ]
https://github.com/kylebebak/Requester/blob/4a9f9f051fa5fc951a8f7ad098a328261ca2db97/deps/oauthlib/oauth2/rfc6749/utils.py#L25-L32
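A behavior sketch (on Python 3, oauthlib's unicode_type compat alias is str):

print(list_to_scope(['read', 'write']))  # 'read write'
print(list_to_scope('read write'))       # strings are returned unchanged
print(list_to_scope(None))               # None
list_to_scope(42)                        # raises ValueError: Invalid scope (42), ...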
aliyun/aliyun-openapi-python-sdk
bda53176cc9cf07605b1cf769f0df444cca626a0
aliyun-python-sdk-core-v3/aliyunsdkcore/vendored/requests/utils.py
python
guess_filename
(obj)
Tries to guess the filename of the given object.
Tries to guess the filename of the given object.
[ "Tries", "to", "guess", "the", "filename", "of", "the", "given", "object", "." ]
def guess_filename(obj): """Tries to guess the filename of the given object.""" name = getattr(obj, 'name', None) if (name and isinstance(name, basestring) and name[0] != '<' and name[-1] != '>'): return os.path.basename(name)
[ "def", "guess_filename", "(", "obj", ")", ":", "name", "=", "getattr", "(", "obj", ",", "'name'", ",", "None", ")", "if", "(", "name", "and", "isinstance", "(", "name", ",", "basestring", ")", "and", "name", "[", "0", "]", "!=", "'<'", "and", "name", "[", "-", "1", "]", "!=", "'>'", ")", ":", "return", "os", ".", "path", ".", "basename", "(", "name", ")" ]
https://github.com/aliyun/aliyun-openapi-python-sdk/blob/bda53176cc9cf07605b1cf769f0df444cca626a0/aliyun-python-sdk-core-v3/aliyunsdkcore/vendored/requests/utils.py#L210-L215
zhl2008/awd-platform
0416b31abea29743387b10b3914581fbe8e7da5e
web_flaskbb/lib/python2.7/site-packages/setuptools/extern/__init__.py
python
VendorImporter.search_path
(self)
Search first the vendor package then as a natural package.
Search first the vendor package then as a natural package.
[ "Search", "first", "the", "vendor", "package", "then", "as", "a", "natural", "package", "." ]
def search_path(self): """ Search first the vendor package then as a natural package. """ yield self.vendor_pkg + '.' yield ''
[ "def", "search_path", "(", "self", ")", ":", "yield", "self", ".", "vendor_pkg", "+", "'.'", "yield", "''" ]
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/setuptools/extern/__init__.py#L16-L21
mne-tools/mne-python
f90b303ce66a8415e64edd4605b09ac0179c1ebf
mne/viz/topo.py
python
_erfimage_imshow_unified
(bn, ch_idx, tmin, tmax, vmin, vmax, ylim=None, data=None, epochs=None, sigma=None, order=None, scalings=None, vline=None, x_label=None, y_label=None, colorbar=False, cmap='RdBu_r', vlim_array=None)
Plot erfimage topography using a single axis.
Plot erfimage topography using a single axis.
[ "Plot", "erfimage", "topography", "using", "a", "single", "axis", "." ]
def _erfimage_imshow_unified(bn, ch_idx, tmin, tmax, vmin, vmax, ylim=None, data=None, epochs=None, sigma=None, order=None, scalings=None, vline=None, x_label=None, y_label=None, colorbar=False, cmap='RdBu_r', vlim_array=None): """Plot erfimage topography using a single axis.""" from scipy import ndimage _compute_ax_scalings(bn, (tmin, tmax), (0, len(epochs.events))) ax = bn.ax data_lines = bn.data_lines extent = (bn.x_t + bn.x_s * tmin, bn.x_t + bn.x_s * tmax, bn.y_t, bn.y_t + bn.y_s * len(epochs.events)) this_data = data[:, ch_idx, :] vmin, vmax = (None, None) if vlim_array is None else vlim_array[ch_idx] if callable(order): order = order(epochs.times, this_data) if order is not None: this_data = this_data[order] if sigma > 0.: this_data = ndimage.gaussian_filter1d(this_data, sigma=sigma, axis=0) data_lines.append(ax.imshow(this_data, extent=extent, aspect='auto', origin='lower', vmin=vmin, vmax=vmax, picker=True, cmap=cmap, interpolation='nearest'))
[ "def", "_erfimage_imshow_unified", "(", "bn", ",", "ch_idx", ",", "tmin", ",", "tmax", ",", "vmin", ",", "vmax", ",", "ylim", "=", "None", ",", "data", "=", "None", ",", "epochs", "=", "None", ",", "sigma", "=", "None", ",", "order", "=", "None", ",", "scalings", "=", "None", ",", "vline", "=", "None", ",", "x_label", "=", "None", ",", "y_label", "=", "None", ",", "colorbar", "=", "False", ",", "cmap", "=", "'RdBu_r'", ",", "vlim_array", "=", "None", ")", ":", "from", "scipy", "import", "ndimage", "_compute_ax_scalings", "(", "bn", ",", "(", "tmin", ",", "tmax", ")", ",", "(", "0", ",", "len", "(", "epochs", ".", "events", ")", ")", ")", "ax", "=", "bn", ".", "ax", "data_lines", "=", "bn", ".", "data_lines", "extent", "=", "(", "bn", ".", "x_t", "+", "bn", ".", "x_s", "*", "tmin", ",", "bn", ".", "x_t", "+", "bn", ".", "x_s", "*", "tmax", ",", "bn", ".", "y_t", ",", "bn", ".", "y_t", "+", "bn", ".", "y_s", "*", "len", "(", "epochs", ".", "events", ")", ")", "this_data", "=", "data", "[", ":", ",", "ch_idx", ",", ":", "]", "vmin", ",", "vmax", "=", "(", "None", ",", "None", ")", "if", "vlim_array", "is", "None", "else", "vlim_array", "[", "ch_idx", "]", "if", "callable", "(", "order", ")", ":", "order", "=", "order", "(", "epochs", ".", "times", ",", "this_data", ")", "if", "order", "is", "not", "None", ":", "this_data", "=", "this_data", "[", "order", "]", "if", "sigma", ">", "0.", ":", "this_data", "=", "ndimage", ".", "gaussian_filter1d", "(", "this_data", ",", "sigma", "=", "sigma", ",", "axis", "=", "0", ")", "data_lines", ".", "append", "(", "ax", ".", "imshow", "(", "this_data", ",", "extent", "=", "extent", ",", "aspect", "=", "'auto'", ",", "origin", "=", "'lower'", ",", "vmin", "=", "vmin", ",", "vmax", "=", "vmax", ",", "picker", "=", "True", ",", "cmap", "=", "cmap", ",", "interpolation", "=", "'nearest'", ")", ")" ]
https://github.com/mne-tools/mne-python/blob/f90b303ce66a8415e64edd4605b09ac0179c1ebf/mne/viz/topo.py#L542-L569
jaychsu/algorithm
87dac5456b74a515dd97507ac68e9b8588066a04
other/uneven_random_get.py
python
uneven_random_get
(options, rate)
return options[0]
unevenly fetch the option according to the corresponding rate :type options: list[str] :type rate: list[num] :rtype: str
unevenly fetch the option according to the corresponding rate :type options: list[str] :type rate: list[num] :rtype: str
[ "unevenly", "fetch", "the", "option", "according", "to", "the", "corresponding", "rate", ":", "type", "options", ":", "list", "[", "str", "]", ":", "type", "rate", ":", "list", "[", "num", "]", ":", "rtype", ":", "str" ]
def uneven_random_get(options, rate): """unevenly fetch the option according to the corresponding rate :type options: list[str] :type rate: list[num] :rtype: str """ if not options or not rate or len(options) != len(rate): return '' num = 0 rand = random.randint(1, sum(rate)) for i in range(len(rate)): num += rate[i] if num >= rand: return options[i] return options[0]
[ "def", "uneven_random_get", "(", "options", ",", "rate", ")", ":", "if", "not", "options", "or", "not", "rate", "or", "len", "(", "options", ")", "!=", "len", "(", "rate", ")", ":", "return", "''", "num", "=", "0", "rand", "=", "random", ".", "randint", "(", "1", ",", "sum", "(", "rate", ")", ")", "for", "i", "in", "range", "(", "len", "(", "rate", ")", ")", ":", "num", "+=", "rate", "[", "i", "]", "if", "num", ">=", "rand", ":", "return", "options", "[", "i", "]", "return", "options", "[", "0", "]" ]
https://github.com/jaychsu/algorithm/blob/87dac5456b74a515dd97507ac68e9b8588066a04/other/uneven_random_get.py#L8-L26
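A quick distribution check, assuming random is imported at module level as the function requires:

from collections import Counter

options = ['a', 'b', 'c']
rate = [1, 2, 7]  # weights summing to 10
draws = Counter(uneven_random_get(options, rate) for _ in range(10000))
print(draws)      # roughly 1000 / 2000 / 7000 draws for a / b / c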
DLR-RM/stable-baselines3
e9a8979022d7005560d43b7a9c1dc1ba85f7989a
stable_baselines3/common/vec_env/vec_normalize.py
python
VecNormalize.__getstate__
(self)
return state
Gets state for pickling. Excludes self.venv, as in general VecEnv's may not be pickleable.
Gets state for pickling.
[ "Gets", "state", "for", "pickling", "." ]
def __getstate__(self) -> Dict[str, Any]: """ Gets state for pickling. Excludes self.venv, as in general VecEnv's may not be pickleable.""" state = self.__dict__.copy() # these attributes are not pickleable del state["venv"] del state["class_attributes"] # these attributes depend on the above and so we would prefer not to pickle del state["returns"] return state
[ "def", "__getstate__", "(", "self", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "state", "=", "self", ".", "__dict__", ".", "copy", "(", ")", "# these attributes are not pickleable", "del", "state", "[", "\"venv\"", "]", "del", "state", "[", "\"class_attributes\"", "]", "# these attributes depend on the above and so we would prefer not to pickle", "del", "state", "[", "\"returns\"", "]", "return", "state" ]
https://github.com/DLR-RM/stable-baselines3/blob/e9a8979022d7005560d43b7a9c1dc1ba85f7989a/stable_baselines3/common/vec_env/vec_normalize.py#L99-L110
DamnWidget/anaconda
a9998fb362320f907d5ccbc6fcf5b62baca677c0
anaconda_lib/helpers.py
python
valid_languages
(**kwargs)
return ['python'] + languages
Return back valid languages for anaconda plugins
Return back valid languages for anaconda plugins
[ "Return", "back", "valid", "languages", "for", "anaconda", "plugins" ]
def valid_languages(**kwargs): """Return back valid languages for anaconda plugins """ path = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir) languages = [ f.rsplit('_', 1)[1].lower() for f in os.listdir(path) if f.startswith('anaconda_') and 'vagrant' not in f ] return ['python'] + languages
[ "def", "valid_languages", "(", "*", "*", "kwargs", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "os", ".", "pardir", ",", "os", ".", "pardir", ")", "languages", "=", "[", "f", ".", "rsplit", "(", "'_'", ",", "1", ")", "[", "1", "]", ".", "lower", "(", ")", "for", "f", "in", "os", ".", "listdir", "(", "path", ")", "if", "f", ".", "startswith", "(", "'anaconda_'", ")", "and", "'vagrant'", "not", "in", "f", "]", "return", "[", "'python'", "]", "+", "languages" ]
https://github.com/DamnWidget/anaconda/blob/a9998fb362320f907d5ccbc6fcf5b62baca677c0/anaconda_lib/helpers.py#L421-L431
oilshell/oil
94388e7d44a9ad879b12615f6203b38596b5a2d3
Python-2.7.13/Lib/mailbox.py
python
_singlefileMailbox.flush
(self)
Write any pending changes to disk.
Write any pending changes to disk.
[ "Write", "any", "pending", "changes", "to", "disk", "." ]
def flush(self): """Write any pending changes to disk.""" if not self._pending: if self._pending_sync: # Messages have only been added, so syncing the file # is enough. _sync_flush(self._file) self._pending_sync = False return # In order to be writing anything out at all, self._toc must # already have been generated (and presumably has been modified # by adding or deleting an item). assert self._toc is not None # Check length of self._file; if it's changed, some other process # has modified the mailbox since we scanned it. self._file.seek(0, 2) cur_len = self._file.tell() if cur_len != self._file_length: raise ExternalClashError('Size of mailbox file changed ' '(expected %i, found %i)' % (self._file_length, cur_len)) new_file = _create_temporary(self._path) try: new_toc = {} self._pre_mailbox_hook(new_file) for key in sorted(self._toc.keys()): start, stop = self._toc[key] self._file.seek(start) self._pre_message_hook(new_file) new_start = new_file.tell() while True: buffer = self._file.read(min(4096, stop - self._file.tell())) if buffer == '': break new_file.write(buffer) new_toc[key] = (new_start, new_file.tell()) self._post_message_hook(new_file) self._file_length = new_file.tell() except: new_file.close() os.remove(new_file.name) raise _sync_close(new_file) # self._file is about to get replaced, so no need to sync. self._file.close() # Make sure the new file's mode is the same as the old file's mode = os.stat(self._path).st_mode os.chmod(new_file.name, mode) try: os.rename(new_file.name, self._path) except OSError, e: if e.errno == errno.EEXIST or \ (os.name == 'os2' and e.errno == errno.EACCES): os.remove(self._path) os.rename(new_file.name, self._path) else: raise self._file = open(self._path, 'rb+') self._toc = new_toc self._pending = False self._pending_sync = False if self._locked: _lock_file(self._file, dotlock=False)
[ "def", "flush", "(", "self", ")", ":", "if", "not", "self", ".", "_pending", ":", "if", "self", ".", "_pending_sync", ":", "# Messages have only been added, so syncing the file", "# is enough.", "_sync_flush", "(", "self", ".", "_file", ")", "self", ".", "_pending_sync", "=", "False", "return", "# In order to be writing anything out at all, self._toc must", "# already have been generated (and presumably has been modified", "# by adding or deleting an item).", "assert", "self", ".", "_toc", "is", "not", "None", "# Check length of self._file; if it's changed, some other process", "# has modified the mailbox since we scanned it.", "self", ".", "_file", ".", "seek", "(", "0", ",", "2", ")", "cur_len", "=", "self", ".", "_file", ".", "tell", "(", ")", "if", "cur_len", "!=", "self", ".", "_file_length", ":", "raise", "ExternalClashError", "(", "'Size of mailbox file changed '", "'(expected %i, found %i)'", "%", "(", "self", ".", "_file_length", ",", "cur_len", ")", ")", "new_file", "=", "_create_temporary", "(", "self", ".", "_path", ")", "try", ":", "new_toc", "=", "{", "}", "self", ".", "_pre_mailbox_hook", "(", "new_file", ")", "for", "key", "in", "sorted", "(", "self", ".", "_toc", ".", "keys", "(", ")", ")", ":", "start", ",", "stop", "=", "self", ".", "_toc", "[", "key", "]", "self", ".", "_file", ".", "seek", "(", "start", ")", "self", ".", "_pre_message_hook", "(", "new_file", ")", "new_start", "=", "new_file", ".", "tell", "(", ")", "while", "True", ":", "buffer", "=", "self", ".", "_file", ".", "read", "(", "min", "(", "4096", ",", "stop", "-", "self", ".", "_file", ".", "tell", "(", ")", ")", ")", "if", "buffer", "==", "''", ":", "break", "new_file", ".", "write", "(", "buffer", ")", "new_toc", "[", "key", "]", "=", "(", "new_start", ",", "new_file", ".", "tell", "(", ")", ")", "self", ".", "_post_message_hook", "(", "new_file", ")", "self", ".", "_file_length", "=", "new_file", ".", "tell", "(", ")", "except", ":", "new_file", ".", "close", "(", ")", "os", ".", "remove", "(", "new_file", ".", "name", ")", "raise", "_sync_close", "(", "new_file", ")", "# self._file is about to get replaced, so no need to sync.", "self", ".", "_file", ".", "close", "(", ")", "# Make sure the new file's mode is the same as the old file's", "mode", "=", "os", ".", "stat", "(", "self", ".", "_path", ")", ".", "st_mode", "os", ".", "chmod", "(", "new_file", ".", "name", ",", "mode", ")", "try", ":", "os", ".", "rename", "(", "new_file", ".", "name", ",", "self", ".", "_path", ")", "except", "OSError", ",", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "EEXIST", "or", "(", "os", ".", "name", "==", "'os2'", "and", "e", ".", "errno", "==", "errno", ".", "EACCES", ")", ":", "os", ".", "remove", "(", "self", ".", "_path", ")", "os", ".", "rename", "(", "new_file", ".", "name", ",", "self", ".", "_path", ")", "else", ":", "raise", "self", ".", "_file", "=", "open", "(", "self", ".", "_path", ",", "'rb+'", ")", "self", ".", "_toc", "=", "new_toc", "self", ".", "_pending", "=", "False", "self", ".", "_pending_sync", "=", "False", "if", "self", ".", "_locked", ":", "_lock_file", "(", "self", ".", "_file", ",", "dotlock", "=", "False", ")" ]
https://github.com/oilshell/oil/blob/94388e7d44a9ad879b12615f6203b38596b5a2d3/Python-2.7.13/Lib/mailbox.py#L640-L706
Chaffelson/nipyapi
d3b186fd701ce308c2812746d98af9120955e810
nipyapi/nifi/models/lineage_entity.py
python
LineageEntity.__repr__
(self)
return self.to_str()
For `print` and `pprint`
For `print` and `pprint`
[ "For", "print", "and", "pprint" ]
def __repr__(self): """ For `print` and `pprint` """ return self.to_str()
[ "def", "__repr__", "(", "self", ")", ":", "return", "self", ".", "to_str", "(", ")" ]
https://github.com/Chaffelson/nipyapi/blob/d3b186fd701ce308c2812746d98af9120955e810/nipyapi/nifi/models/lineage_entity.py#L104-L108
jliljebl/flowblade
995313a509b80e99eb1ad550d945bdda5995093b
flowblade-trunk/Flowblade/propertyparse.py
python
_property_type
(value_str)
Gets property type from value string by trying to interpret it as int or float; if both fail, it is considered an expression.
Gets property type from value string by trying to interpret it as int or float; if both fail, it is considered an expression.
[ "Gets", "property", "type", "from", "value", "string", "by", "trying", "to", "interpret", "it", "as", "int", "or", "float", "if", "both", "fail", "it", "is", "considered", "an", "expression", "." ]
def _property_type(value_str): """ Gets property type from value string by trying to interpret it as int or float; if both fail, it is considered an expression. """ try: int(value_str) return PROP_INT except ValueError: try: float(value_str) return PROP_FLOAT except ValueError: return PROP_EXPRESSION
[ "def", "_property_type", "(", "value_str", ")", ":", "try", ":", "int", "(", "value_str", ")", "return", "PROP_INT", "except", "ValueError", ":", "try", ":", "float", "(", "value_str", ")", "return", "PROP_FLOAT", "except", "ValueError", ":", "return", "PROP_EXPRESSION" ]
https://github.com/jliljebl/flowblade/blob/995313a509b80e99eb1ad550d945bdda5995093b/flowblade-trunk/Flowblade/propertyparse.py#L554-L567
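A behavior sketch, assuming the module-level PROP_INT / PROP_FLOAT / PROP_EXPRESSION constants:

_property_type('42')          # -> PROP_INT
_property_type('0.25')        # -> PROP_FLOAT
_property_type('0=10;5=40')   # -> PROP_EXPRESSION (keyframe-style string)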
fake-name/ReadableWebProxy
ed5c7abe38706acc2684a1e6cd80242a03c5f010
WebMirror/management/rss_parser_funcs/feed_parse_extractSnowbelldotWordpressCom.py
python
extractSnowbelldotWordpressCom
(item)
return False
Parser for 'snowbelldot.wordpress.com'
Parser for 'snowbelldot.wordpress.com'
[ "Parser", "for", "snowbelldot", ".", "wordpress", ".", "com" ]
def extractSnowbelldotWordpressCom(item): ''' Parser for 'snowbelldot.wordpress.com' ''' vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title']) if not (chp or vol) or "preview" in item['title'].lower(): return None tagmap = [ ('Cold King, the Doctor Fei Is Running Away', 'Cold King, the Doctor Fei Is Running Away', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'), ] for tagname, name, tl_type in tagmap: if tagname in item['tags']: return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type) return False
[ "def", "extractSnowbelldotWordpressCom", "(", "item", ")", ":", "vol", ",", "chp", ",", "frag", ",", "postfix", "=", "extractVolChapterFragmentPostfix", "(", "item", "[", "'title'", "]", ")", "if", "not", "(", "chp", "or", "vol", ")", "or", "\"preview\"", "in", "item", "[", "'title'", "]", ".", "lower", "(", ")", ":", "return", "None", "tagmap", "=", "[", "(", "'Cold King, the Doctor Fei Is Running Away'", ",", "'Cold King, the Doctor Fei Is Running Away'", ",", "'translated'", ")", ",", "(", "'PRC'", ",", "'PRC'", ",", "'translated'", ")", ",", "(", "'Loiterous'", ",", "'Loiterous'", ",", "'oel'", ")", ",", "]", "for", "tagname", ",", "name", ",", "tl_type", "in", "tagmap", ":", "if", "tagname", "in", "item", "[", "'tags'", "]", ":", "return", "buildReleaseMessageWithType", "(", "item", ",", "name", ",", "vol", ",", "chp", ",", "frag", "=", "frag", ",", "postfix", "=", "postfix", ",", "tl_type", "=", "tl_type", ")", "return", "False" ]
https://github.com/fake-name/ReadableWebProxy/blob/ed5c7abe38706acc2684a1e6cd80242a03c5f010/WebMirror/management/rss_parser_funcs/feed_parse_extractSnowbelldotWordpressCom.py#L1-L21
ales-tsurko/cells
4cf7e395cd433762bea70cdc863a346f3a6fe1d0
packaging/macos/python/lib/python3.7/asyncio/protocols.py
python
BaseProtocol.connection_made
(self, transport)
Called when a connection is made. The argument is the transport representing the pipe connection. To receive data, wait for data_received() calls. When the connection is closed, connection_lost() is called.
Called when a connection is made.
[ "Called", "when", "a", "connection", "is", "made", "." ]
def connection_made(self, transport): """Called when a connection is made. The argument is the transport representing the pipe connection. To receive data, wait for data_received() calls. When the connection is closed, connection_lost() is called. """
[ "def", "connection_made", "(", "self", ",", "transport", ")", ":" ]
https://github.com/ales-tsurko/cells/blob/4cf7e395cd433762bea70cdc863a346f3a6fe1d0/packaging/macos/python/lib/python3.7/asyncio/protocols.py#L19-L25
chenjun2hao/CenterFace.pytorch
37a351be07606136561bc56eae716f8575b61e00
src/tools/voc_eval_lib/datasets/voc_eval.py
python
voc_eval
(detpath, annopath, imagesetfile, classname, cachedir, ovthresh=0.5, use_07_metric=False, use_diff=False)
return rec, prec, ap
rec, prec, ap = voc_eval(detpath, annopath, imagesetfile, classname, [ovthresh], [use_07_metric]) Top level function that does the PASCAL VOC evaluation. detpath: Path to detections detpath.format(classname) should produce the detection results file. annopath: Path to annotations annopath.format(imagename) should be the xml annotations file. imagesetfile: Text file containing the list of images, one image per line. classname: Category name (duh) cachedir: Directory for caching the annotations [ovthresh]: Overlap threshold (default = 0.5) [use_07_metric]: Whether to use VOC07's 11 point AP computation (default False)
rec, prec, ap = voc_eval(detpath, annopath, imagesetfile, classname, [ovthresh], [use_07_metric])
[ "rec", "prec", "ap", "=", "voc_eval", "(", "detpath", "annopath", "imagesetfile", "classname", "[", "ovthresh", "]", "[", "use_07_metric", "]", ")" ]
def voc_eval(detpath, annopath, imagesetfile, classname, cachedir, ovthresh=0.5, use_07_metric=False, use_diff=False): """rec, prec, ap = voc_eval(detpath, annopath, imagesetfile, classname, [ovthresh], [use_07_metric]) Top level function that does the PASCAL VOC evaluation. detpath: Path to detections detpath.format(classname) should produce the detection results file. annopath: Path to annotations annopath.format(imagename) should be the xml annotations file. imagesetfile: Text file containing the list of images, one image per line. classname: Category name (duh) cachedir: Directory for caching the annotations [ovthresh]: Overlap threshold (default = 0.5) [use_07_metric]: Whether to use VOC07's 11 point AP computation (default False) """ # assumes detections are in detpath.format(classname) # assumes annotations are in annopath.format(imagename) # assumes imagesetfile is a text file with each line an image name # cachedir caches the annotations in a pickle file # first load gt if not os.path.isdir(cachedir): os.mkdir(cachedir) cachefile = os.path.join(cachedir, '%s_annots.pkl' % imagesetfile) # read list of images with open(imagesetfile, 'r') as f: lines = f.readlines() imagenames = [x.strip() for x in lines] if not os.path.isfile(cachefile): # load annotations recs = {} for i, imagename in enumerate(imagenames): recs[imagename] = parse_rec(annopath.format(imagename)) if i % 100 == 0: print('Reading annotation for {:d}/{:d}'.format( i + 1, len(imagenames))) # save print('Saving cached annotations to {:s}'.format(cachefile)) with open(cachefile, 'wb') as f: pickle.dump(recs, f) else: # load with open(cachefile, 'rb') as f: try: recs = pickle.load(f) except: recs = pickle.load(f, encoding='bytes') # extract gt objects for this class class_recs = {} npos = 0 for imagename in imagenames: R = [obj for obj in recs[imagename] if obj['name'] == classname] bbox = np.array([x['bbox'] for x in R]) if use_diff: difficult = np.array([False for x in R]).astype(np.bool) else: difficult = np.array([x['difficult'] for x in R]).astype(np.bool) det = [False] * len(R) npos = npos + sum(~difficult) class_recs[imagename] = {'bbox': bbox, 'difficult': difficult, 'det': det} # read dets detfile = detpath.format(classname) with open(detfile, 'r') as f: lines = f.readlines() splitlines = [x.strip().split(' ') for x in lines] image_ids = [x[0] for x in splitlines] confidence = np.array([float(x[1]) for x in splitlines]) BB = np.array([[float(z) for z in x[2:]] for x in splitlines]) nd = len(image_ids) tp = np.zeros(nd) fp = np.zeros(nd) if BB.shape[0] > 0: # sort by confidence sorted_ind = np.argsort(-confidence) sorted_scores = np.sort(-confidence) BB = BB[sorted_ind, :] image_ids = [image_ids[x] for x in sorted_ind] # go down dets and mark TPs and FPs for d in range(nd): R = class_recs[image_ids[d]] bb = BB[d, :].astype(float) ovmax = -np.inf BBGT = R['bbox'].astype(float) if BBGT.size > 0: # compute overlaps # intersection ixmin = np.maximum(BBGT[:, 0], bb[0]) iymin = np.maximum(BBGT[:, 1], bb[1]) ixmax = np.minimum(BBGT[:, 2], bb[2]) iymax = np.minimum(BBGT[:, 3], bb[3]) iw = np.maximum(ixmax - ixmin + 1., 0.) ih = np.maximum(iymax - iymin + 1., 0.) inters = iw * ih # union uni = ((bb[2] - bb[0] + 1.) * (bb[3] - bb[1] + 1.) + (BBGT[:, 2] - BBGT[:, 0] + 1.) * (BBGT[:, 3] - BBGT[:, 1] + 1.) - inters) overlaps = inters / uni ovmax = np.max(overlaps) jmax = np.argmax(overlaps) if ovmax > ovthresh: if not R['difficult'][jmax]: if not R['det'][jmax]: tp[d] = 1. R['det'][jmax] = 1 else: fp[d] = 1. else: fp[d] = 1. # compute precision recall fp = np.cumsum(fp) tp = np.cumsum(tp) rec = tp / float(npos) # avoid divide by zero in case the first detection matches a difficult # ground truth prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps) ap = voc_ap(rec, prec, use_07_metric) return rec, prec, ap
[ "def", "voc_eval", "(", "detpath", ",", "annopath", ",", "imagesetfile", ",", "classname", ",", "cachedir", ",", "ovthresh", "=", "0.5", ",", "use_07_metric", "=", "False", ",", "use_diff", "=", "False", ")", ":", "# assumes detections are in detpath.format(classname)", "# assumes annotations are in annopath.format(imagename)", "# assumes imagesetfile is a text file with each line an image name", "# cachedir caches the annotations in a pickle file", "# first load gt", "if", "not", "os", ".", "path", ".", "isdir", "(", "cachedir", ")", ":", "os", ".", "mkdir", "(", "cachedir", ")", "cachefile", "=", "os", ".", "path", ".", "join", "(", "cachedir", ",", "'%s_annots.pkl'", "%", "imagesetfile", ")", "# read list of images", "with", "open", "(", "imagesetfile", ",", "'r'", ")", "as", "f", ":", "lines", "=", "f", ".", "readlines", "(", ")", "imagenames", "=", "[", "x", ".", "strip", "(", ")", "for", "x", "in", "lines", "]", "if", "not", "os", ".", "path", ".", "isfile", "(", "cachefile", ")", ":", "# load annotations", "recs", "=", "{", "}", "for", "i", ",", "imagename", "in", "enumerate", "(", "imagenames", ")", ":", "recs", "[", "imagename", "]", "=", "parse_rec", "(", "annopath", ".", "format", "(", "imagename", ")", ")", "if", "i", "%", "100", "==", "0", ":", "print", "(", "'Reading annotation for {:d}/{:d}'", ".", "format", "(", "i", "+", "1", ",", "len", "(", "imagenames", ")", ")", ")", "# save", "print", "(", "'Saving cached annotations to {:s}'", ".", "format", "(", "cachefile", ")", ")", "with", "open", "(", "cachefile", ",", "'wb'", ")", "as", "f", ":", "pickle", ".", "dump", "(", "recs", ",", "f", ")", "else", ":", "# load", "with", "open", "(", "cachefile", ",", "'rb'", ")", "as", "f", ":", "try", ":", "recs", "=", "pickle", ".", "load", "(", "f", ")", "except", ":", "recs", "=", "pickle", ".", "load", "(", "f", ",", "encoding", "=", "'bytes'", ")", "# extract gt objects for this class", "class_recs", "=", "{", "}", "npos", "=", "0", "for", "imagename", "in", "imagenames", ":", "R", "=", "[", "obj", "for", "obj", "in", "recs", "[", "imagename", "]", "if", "obj", "[", "'name'", "]", "==", "classname", "]", "bbox", "=", "np", ".", "array", "(", "[", "x", "[", "'bbox'", "]", "for", "x", "in", "R", "]", ")", "if", "use_diff", ":", "difficult", "=", "np", ".", "array", "(", "[", "False", "for", "x", "in", "R", "]", ")", ".", "astype", "(", "np", ".", "bool", ")", "else", ":", "difficult", "=", "np", ".", "array", "(", "[", "x", "[", "'difficult'", "]", "for", "x", "in", "R", "]", ")", ".", "astype", "(", "np", ".", "bool", ")", "det", "=", "[", "False", "]", "*", "len", "(", "R", ")", "npos", "=", "npos", "+", "sum", "(", "~", "difficult", ")", "class_recs", "[", "imagename", "]", "=", "{", "'bbox'", ":", "bbox", ",", "'difficult'", ":", "difficult", ",", "'det'", ":", "det", "}", "# read dets", "detfile", "=", "detpath", ".", "format", "(", "classname", ")", "with", "open", "(", "detfile", ",", "'r'", ")", "as", "f", ":", "lines", "=", "f", ".", "readlines", "(", ")", "splitlines", "=", "[", "x", ".", "strip", "(", ")", ".", "split", "(", "' '", ")", "for", "x", "in", "lines", "]", "image_ids", "=", "[", "x", "[", "0", "]", "for", "x", "in", "splitlines", "]", "confidence", "=", "np", ".", "array", "(", "[", "float", "(", "x", "[", "1", "]", ")", "for", "x", "in", "splitlines", "]", ")", "BB", "=", "np", ".", "array", "(", "[", "[", "float", "(", "z", ")", "for", "z", "in", "x", "[", "2", ":", "]", "]", "for", "x", "in", "splitlines", "]", ")", "nd", "=", "len", "(", "image_ids", ")", "tp", "=", "np", ".", "zeros", "(", "nd", ")", "fp", "=", "np", ".", "zeros", "(", "nd", ")", "if", "BB", ".", "shape", "[", "0", "]", ">", "0", ":", "# sort by confidence", "sorted_ind", "=", "np", ".", "argsort", "(", "-", "confidence", ")", "sorted_scores", "=", "np", ".", "sort", "(", "-", "confidence", ")", "BB", "=", "BB", "[", "sorted_ind", ",", ":", "]", "image_ids", "=", "[", "image_ids", "[", "x", "]", "for", "x", "in", "sorted_ind", "]", "# go down dets and mark TPs and FPs", "for", "d", "in", "range", "(", "nd", ")", ":", "R", "=", "class_recs", "[", "image_ids", "[", "d", "]", "]", "bb", "=", "BB", "[", "d", ",", ":", "]", ".", "astype", "(", "float", ")", "ovmax", "=", "-", "np", ".", "inf", "BBGT", "=", "R", "[", "'bbox'", "]", ".", "astype", "(", "float", ")", "if", "BBGT", ".", "size", ">", "0", ":", "# compute overlaps", "# intersection", "ixmin", "=", "np", ".", "maximum", "(", "BBGT", "[", ":", ",", "0", "]", ",", "bb", "[", "0", "]", ")", "iymin", "=", "np", ".", "maximum", "(", "BBGT", "[", ":", ",", "1", "]", ",", "bb", "[", "1", "]", ")", "ixmax", "=", "np", ".", "minimum", "(", "BBGT", "[", ":", ",", "2", "]", ",", "bb", "[", "2", "]", ")", "iymax", "=", "np", ".", "minimum", "(", "BBGT", "[", ":", ",", "3", "]", ",", "bb", "[", "3", "]", ")", "iw", "=", "np", ".", "maximum", "(", "ixmax", "-", "ixmin", "+", "1.", ",", "0.", ")", "ih", "=", "np", ".", "maximum", "(", "iymax", "-", "iymin", "+", "1.", ",", "0.", ")", "inters", "=", "iw", "*", "ih", "# union", "uni", "=", "(", "(", "bb", "[", "2", "]", "-", "bb", "[", "0", "]", "+", "1.", ")", "*", "(", "bb", "[", "3", "]", "-", "bb", "[", "1", "]", "+", "1.", ")", "+", "(", "BBGT", "[", ":", ",", "2", "]", "-", "BBGT", "[", ":", ",", "0", "]", "+", "1.", ")", "*", "(", "BBGT", "[", ":", ",", "3", "]", "-", "BBGT", "[", ":", ",", "1", "]", "+", "1.", ")", "-", "inters", ")", "overlaps", "=", "inters", "/", "uni", "ovmax", "=", "np", ".", "max", "(", "overlaps", ")", "jmax", "=", "np", ".", "argmax", "(", "overlaps", ")", "if", "ovmax", ">", "ovthresh", ":", "if", "not", "R", "[", "'difficult'", "]", "[", "jmax", "]", ":", "if", "not", "R", "[", "'det'", "]", "[", "jmax", "]", ":", "tp", "[", "d", "]", "=", "1.", "R", "[", "'det'", "]", "[", "jmax", "]", "=", "1", "else", ":", "fp", "[", "d", "]", "=", "1.", "else", ":", "fp", "[", "d", "]", "=", "1.", "# compute precision recall", "fp", "=", "np", ".", "cumsum", "(", "fp", ")", "tp", "=", "np", ".", "cumsum", "(", "tp", ")", "rec", "=", "tp", "/", "float", "(", "npos", ")", "# avoid divide by zero in case the first detection matches a difficult", "# ground truth", "prec", "=", "tp", "/", "np", ".", "maximum", "(", "tp", "+", "fp", ",", "np", ".", "finfo", "(", "np", ".", "float64", ")", ".", "eps", ")", "ap", "=", "voc_ap", "(", "rec", ",", "prec", ",", "use_07_metric", ")", "return", "rec", ",", "prec", ",", "ap" ]
https://github.com/chenjun2hao/CenterFace.pytorch/blob/37a351be07606136561bc56eae716f8575b61e00/src/tools/voc_eval_lib/datasets/voc_eval.py#L70-L215
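The voc_eval record above is the standard PASCAL VOC detection benchmark: detections are matched greedily to ground truth at an IoU threshold (default 0.5), difficult objects are excluded from the positive count, and the cumulative precision/recall curves feed voc_ap. A minimal sketch of the overlap core, with voc_iou as a hypothetical helper name; the "+ 1." terms follow VOC's inclusive pixel-coordinate convention, mirroring the tokens above.

import numpy as np

def voc_iou(bb, BBGT):
    # IoU between one detection bb = [x1, y1, x2, y2] and an (N, 4) array of
    # ground-truth boxes, using VOC's inclusive pixel coordinates.
    ixmin = np.maximum(BBGT[:, 0], bb[0])
    iymin = np.maximum(BBGT[:, 1], bb[1])
    ixmax = np.minimum(BBGT[:, 2], bb[2])
    iymax = np.minimum(BBGT[:, 3], bb[3])
    iw = np.maximum(ixmax - ixmin + 1., 0.)
    ih = np.maximum(iymax - iymin + 1., 0.)
    inters = iw * ih
    uni = ((bb[2] - bb[0] + 1.) * (bb[3] - bb[1] + 1.)
           + (BBGT[:, 2] - BBGT[:, 0] + 1.) * (BBGT[:, 3] - BBGT[:, 1] + 1.)
           - inters)
    return inters / uni

# identical boxes give an IoU of exactly 1.0
assert voc_iou(np.array([0., 0., 9., 9.]), np.array([[0., 0., 9., 9.]]))[0] == 1.0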
iclavera/learning_to_adapt
bd7d99ba402521c96631e7d09714128f549db0f1
learning_to_adapt/mujoco_py/mjtypes.py
python
MjModelWrapper.light_diffuse
(self, value)
[]
def light_diffuse(self, value):
    val_ptr = np.array(value, dtype=np.float64).ctypes.data_as(POINTER(c_float))
    memmove(self._wrapped.contents.light_diffuse, val_ptr, self.nlight*3 * sizeof(c_float))
[ "def", "light_diffuse", "(", "self", ",", "value", ")", ":", "val_ptr", "=", "np", ".", "array", "(", "value", ",", "dtype", "=", "np", ".", "float64", ")", ".", "ctypes", ".", "data_as", "(", "POINTER", "(", "c_float", ")", ")", "memmove", "(", "self", ".", "_wrapped", ".", "contents", ".", "light_diffuse", ",", "val_ptr", ",", "self", ".", "nlight", "*", "3", "*", "sizeof", "(", "c_float", ")", ")" ]
https://github.com/iclavera/learning_to_adapt/blob/bd7d99ba402521c96631e7d09714128f549db0f1/learning_to_adapt/mujoco_py/mjtypes.py#L4794-L4796
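One thing worth flagging in the light_diffuse record: the array is staged as np.float64 but its buffer is then reinterpreted as c_float, so the memmove appears to copy mismatched bytes (8-byte doubles read as 4-byte floats). A sketch of the same pattern with the staging dtype matched to the C type; copy_float_array and its arguments are hypothetical, not part of the wrapped MuJoCo API.

import numpy as np
from ctypes import POINTER, c_float, memmove, sizeof

def copy_float_array(dest_ptr, values, count):
    # Stage the values as float32 so the buffer layout matches c_float;
    # staging as float64 (as in the record above) would make memmove copy
    # reinterpreted bytes rather than the intended numbers.
    val = np.ascontiguousarray(values, dtype=np.float32)
    assert val.size == count
    memmove(dest_ptr, val.ctypes.data_as(POINTER(c_float)),
            count * sizeof(c_float))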
codepr/creak
52b0d74c3f77dad45e97dd7aa22976b83a0d3676
creak.py
python
get_mitm
(parsed_args)
return (args, changed, original_mac_addr, cmitm.PcapMitm(dev, utils.parse_mac(mac_addr), args.source, args.target, debug, verbose))
create an object of type Mitm based on arguments received
create an object of type Mitm based on arguments received
[ "create", "an", "object", "of", "type", "Mitm", "based", "on", "arguments", "received" ]
def get_mitm(parsed_args):
    """ create an object of type Mitm based on arguments received """
    args = parsed_args
    if not args.dev:
        sys.exit(sys.argv[0] + ' -h for help\n[!] Must specify interface')
    dev = "%s" % "','".join(args.dev)
    original_mac_addr = utils.get_mac_by_dev(dev)
    mac_addr, changed = original_mac_addr, False
    if not args.source:
        try:
            args.source = utils.get_default_gateway_linux()
        except OSError:
            args.source = raw_input('[!] Unable to retrieve default gateway, please specify one: ')
            if not utils.is_ipv4(args.source):
                exit('[!] Unable to retrieve default gateway, please specify one using -s option')
            else:
                pass
    if not args.target:
        args.target = raw_input('[?] No target address specified, please insert one: ')
        if not utils.is_ipv4(args.target):
            exit('[!] Must specify at least one target address')
    else:
        if len(args.target) == 1:
            args.target = ''.join(args.target)
    conf = ConfigParser.ConfigParser()
    conf.read('./creak/config')
    verbose = conf.getboolean('output', 'VERBOSE')
    debug = conf.getboolean('output', 'DEBUG')
    if args.verbosity:
        verbose = True
    if args.debug:
        debug = True
    if args.spoof is True:
        choice = raw_input('[+] In order to change MAC address ' + G + dev + W +
                           ' must be temporary put down. Proceed?[y/n] ')
        if choice == 'y':
            if not args.macaddr and not args.manufacturer:
                mac_addr = utils.fake_mac_address([], 1)
            elif args.macaddr and not args.manufacturer:
                if utils.parse_mac(args.macaddr) != utils.parse_mac(original_mac_addr):
                    mac_addr = utils.fake_mac_address(utils.mac_to_hex(args.macaddr))
            elif args.manufacturer:
                macs = utils.get_manufacturer(args.manufacturer)
                mac_addr = utils.fake_mac_address(utils.mac_to_hex(random.choice(macs)))
            try:
                utils.change_mac(dev, mac_addr)
                changed = True
            except OSError:
                pass
            print("[+] Waiting for wireless reactivation..")
            if args.mode == 1 or args.mode == 2:
                time.sleep(10)
            else:
                time.sleep(4)
    # no spoof but set mac address anyway
    elif args.macaddr:
        mac_addr = args.macaddr
    print("[+] Using " + G + mac_addr + W + " MAC address\n"
          "[+] Set " + G + args.source + W + " as default gateway")
    if conf.get('output', 'ENGINE').lower() == 'scapy':
        return (args, changed, original_mac_addr,
                cmitm.ScapyMitm(dev, utils.parse_mac(mac_addr), args.source,
                                args.target, debug, verbose))
    return (args, changed, original_mac_addr,
            cmitm.PcapMitm(dev, utils.parse_mac(mac_addr), args.source,
                           args.target, debug, verbose))
[ "def", "get_mitm", "(", "parsed_args", ")", ":", "args", "=", "parsed_args", "if", "not", "args", ".", "dev", ":", "sys", ".", "exit", "(", "sys", ".", "argv", "[", "0", "]", "+", "' -h for help\\n[!] Must specify interface'", ")", "dev", "=", "\"%s\"", "%", "\"','\"", ".", "join", "(", "args", ".", "dev", ")", "original_mac_addr", "=", "utils", ".", "get_mac_by_dev", "(", "dev", ")", "mac_addr", ",", "changed", "=", "original_mac_addr", ",", "False", "if", "not", "args", ".", "source", ":", "try", ":", "args", ".", "source", "=", "utils", ".", "get_default_gateway_linux", "(", ")", "except", "OSError", ":", "args", ".", "source", "=", "raw_input", "(", "'[!] Unable to retrieve default gateway, please specify one: '", ")", "if", "not", "utils", ".", "is_ipv4", "(", "args", ".", "source", ")", ":", "exit", "(", "'[!] Unable to retrieve default gateway, please specify one using -s option'", ")", "else", ":", "pass", "if", "not", "args", ".", "target", ":", "args", ".", "target", "=", "raw_input", "(", "'[?] No target address specified, please insert one: '", ")", "if", "not", "utils", ".", "is_ipv4", "(", "args", ".", "target", ")", ":", "exit", "(", "'[!] Must specify at least one target address'", ")", "else", ":", "if", "len", "(", "args", ".", "target", ")", "==", "1", ":", "args", ".", "target", "=", "''", ".", "join", "(", "args", ".", "target", ")", "conf", "=", "ConfigParser", ".", "ConfigParser", "(", ")", "conf", ".", "read", "(", "'./creak/config'", ")", "verbose", "=", "conf", ".", "getboolean", "(", "'output'", ",", "'VERBOSE'", ")", "debug", "=", "conf", ".", "getboolean", "(", "'output'", ",", "'DEBUG'", ")", "if", "args", ".", "verbosity", ":", "verbose", "=", "True", "if", "args", ".", "debug", ":", "debug", "=", "True", "if", "args", ".", "spoof", "is", "True", ":", "choice", "=", "raw_input", "(", "'[+] In order to change MAC address '", "+", "G", "+", "dev", "+", "W", "+", "' must be temporary put down. 
Proceed?[y/n] '", ")", "if", "choice", "==", "'y'", ":", "if", "not", "args", ".", "macaddr", "and", "not", "args", ".", "manufacturer", ":", "mac_addr", "=", "utils", ".", "fake_mac_address", "(", "[", "]", ",", "1", ")", "elif", "args", ".", "macaddr", "and", "not", "args", ".", "manufacturer", ":", "if", "utils", ".", "parse_mac", "(", "args", ".", "macaddr", ")", "!=", "utils", ".", "parse_mac", "(", "original_mac_addr", ")", ":", "mac_addr", "=", "utils", ".", "fake_mac_address", "(", "utils", ".", "mac_to_hex", "(", "args", ".", "macaddr", ")", ")", "elif", "args", ".", "manufacturer", ":", "macs", "=", "utils", ".", "get_manufacturer", "(", "args", ".", "manufacturer", ")", "mac_addr", "=", "utils", ".", "fake_mac_address", "(", "utils", ".", "mac_to_hex", "(", "random", ".", "choice", "(", "macs", ")", ")", ")", "try", ":", "utils", ".", "change_mac", "(", "dev", ",", "mac_addr", ")", "changed", "=", "True", "except", "OSError", ":", "pass", "print", "(", "\"[+] Waiting for wireless reactivation..\"", ")", "if", "args", ".", "mode", "==", "1", "or", "args", ".", "mode", "==", "2", ":", "time", ".", "sleep", "(", "10", ")", "else", ":", "time", ".", "sleep", "(", "4", ")", "# no spoof but set mac address anyway", "elif", "args", ".", "macaddr", ":", "mac_addr", "=", "args", ".", "macaddr", "print", "(", "\"[+] Using \"", "+", "G", "+", "mac_addr", "+", "W", "+", "\" MAC address\\n\"", "\"[+] Set \"", "+", "G", "+", "args", ".", "source", "+", "W", "+", "\" as default gateway\"", ")", "if", "conf", ".", "get", "(", "'output'", ",", "'ENGINE'", ")", ".", "lower", "(", ")", "==", "'scapy'", ":", "return", "(", "args", ",", "changed", ",", "original_mac_addr", ",", "cmitm", ".", "ScapyMitm", "(", "dev", ",", "utils", ".", "parse_mac", "(", "mac_addr", ")", ",", "args", ".", "source", ",", "args", ".", "target", ",", "debug", ",", "verbose", ")", ")", "return", "(", "args", ",", "changed", ",", "original_mac_addr", ",", "cmitm", ".", "PcapMitm", "(", "dev", ",", "utils", ".", "parse_mac", "(", "mac_addr", ")", ",", "args", ".", "source", ",", "args", ".", "target", ",", "debug", ",", "verbose", ")", ")" ]
https://github.com/codepr/creak/blob/52b0d74c3f77dad45e97dd7aa22976b83a0d3676/creak.py#L96-L177
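Most of get_mitm is interactive argument fix-up; the part that decides which Mitm object comes back is the ENGINE key read from ./creak/config. A Python 3 sketch of just that dispatch, with stand-in classes since creak's real ScapyMitm/PcapMitm are not imported here:

import configparser

class PcapMitm:             # stand-in for creak's pcap-backed engine
    def __init__(self, *args):
        self.args = args

class ScapyMitm(PcapMitm):  # stand-in for the scapy-backed engine
    pass

def make_engine(conf_path, *mitm_args):
    # ENGINE = scapy selects ScapyMitm; anything else falls back to PcapMitm,
    # matching the two return statements at the end of get_mitm.
    conf = configparser.ConfigParser()
    conf.read(conf_path)
    engine = conf.get('output', 'ENGINE', fallback='pcap').lower()
    return (ScapyMitm if engine == 'scapy' else PcapMitm)(*mitm_args)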
angr/angr
4b04d56ace135018083d36d9083805be8146688b
angr/analyses/vtable.py
python
VtableFinder.is_cross_referenced
(self, addr)
return addr in self.project.kb.xrefs.xrefs_by_dst
[]
def is_cross_referenced(self, addr):
    return addr in self.project.kb.xrefs.xrefs_by_dst
[ "def", "is_cross_referenced", "(", "self", ",", "addr", ")", ":", "return", "addr", "in", "self", ".", "project", ".", "kb", ".", "xrefs", ".", "xrefs_by_dst" ]
https://github.com/angr/angr/blob/4b04d56ace135018083d36d9083805be8146688b/angr/analyses/vtable.py#L42-L43
sametmax/Django--an-app-at-a-time
99eddf12ead76e6dfbeb09ce0bae61e282e22f8a
ignore_this_directory/django/db/backends/base/operations.py
python
BaseDatabaseOperations.max_name_length
(self)
return None
Return the maximum length of table and column names, or None if there is no limit.
Return the maximum length of table and column names, or None if there is no limit.
[ "Return", "the", "maximum", "length", "of", "table", "and", "column", "names", "or", "None", "if", "there", "is", "no", "limit", "." ]
def max_name_length(self):
    """
    Return the maximum length of table and column names, or None if there
    is no limit.
    """
    return None
[ "def", "max_name_length", "(", "self", ")", ":", "return", "None" ]
https://github.com/sametmax/Django--an-app-at-a-time/blob/99eddf12ead76e6dfbeb09ce0bae61e282e22f8a/ignore_this_directory/django/db/backends/base/operations.py#L272-L277
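Concrete backends advertise their identifier limit by overriding this hook, and Django consults it when truncating generated table, column, and index names. A hypothetical override, assuming only that the subclass is wired in as a backend's DatabaseOperations:

from django.db.backends.base.operations import BaseDatabaseOperations

class MyDatabaseOperations(BaseDatabaseOperations):
    def max_name_length(self):
        # e.g. a database with Oracle-style 30-character identifiers
        return 30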
kuri65536/python-for-android
26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891
python3-alpha/python-libs/pyxmpp2/ext/vcard.py
python
VCardName.as_xml
(self,parent)
return n
Create vcard-tmp XML representation of the field. :Parameters: - `parent`: parent node for the element :Types: - `parent`: `libxml2.xmlNode` :return: xml node with the field data. :returntype: `libxml2.xmlNode`
Create vcard-tmp XML representation of the field.
[ "Create", "vcard", "-", "tmp", "XML", "representation", "of", "the", "field", "." ]
def as_xml(self, parent):
    """Create vcard-tmp XML representation of the field.

    :Parameters:
        - `parent`: parent node for the element
    :Types:
        - `parent`: `libxml2.xmlNode`

    :return: xml node with the field data.
    :returntype: `libxml2.xmlNode`"""
    n = parent.newChild(None, "N", None)
    n.newTextChild(None, "FAMILY", to_utf8(self.family))
    n.newTextChild(None, "GIVEN", to_utf8(self.given))
    n.newTextChild(None, "MIDDLE", to_utf8(self.middle))
    n.newTextChild(None, "PREFIX", to_utf8(self.prefix))
    n.newTextChild(None, "SUFFIX", to_utf8(self.suffix))
    return n
[ "def", "as_xml", "(", "self", ",", "parent", ")", ":", "n", "=", "parent", ".", "newChild", "(", "None", ",", "\"N\"", ",", "None", ")", "n", ".", "newTextChild", "(", "None", ",", "\"FAMILY\"", ",", "to_utf8", "(", "self", ".", "family", ")", ")", "n", ".", "newTextChild", "(", "None", ",", "\"GIVEN\"", ",", "to_utf8", "(", "self", ".", "given", ")", ")", "n", ".", "newTextChild", "(", "None", ",", "\"MIDDLE\"", ",", "to_utf8", "(", "self", ".", "middle", ")", ")", "n", ".", "newTextChild", "(", "None", ",", "\"PREFIX\"", ",", "to_utf8", "(", "self", ".", "prefix", ")", ")", "n", ".", "newTextChild", "(", "None", ",", "\"SUFFIX\"", ",", "to_utf8", "(", "self", ".", "suffix", ")", ")", "return", "n" ]
https://github.com/kuri65536/python-for-android/blob/26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891/python3-alpha/python-libs/pyxmpp2/ext/vcard.py#L323-L339
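as_xml above leans on PyXMPP's libxml2 bindings (newChild/newTextChild). The same <N> element structure can be sketched with the standard library's ElementTree, which is easier to try outside that stack; name_as_xml is a hypothetical free-function stand-in for the method:

import xml.etree.ElementTree as ET

def name_as_xml(parent, family, given, middle, prefix, suffix):
    # Build <N> with one text child per vCard name component,
    # mirroring the FAMILY/GIVEN/MIDDLE/PREFIX/SUFFIX order above.
    n = ET.SubElement(parent, "N")
    for tag, text in (("FAMILY", family), ("GIVEN", given), ("MIDDLE", middle),
                      ("PREFIX", prefix), ("SUFFIX", suffix)):
        ET.SubElement(n, tag).text = text
    return n

# vcard = ET.Element("vCard"); name_as_xml(vcard, "Smith", "John", "", "", "")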
sagemath/sage
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
src/sage/modular/hecke/module.py
python
HeckeModule_free_module._repr_
(self)
return repr(type(self))
r""" EXAMPLES:: sage: M = sage.modular.hecke.module.HeckeModule_free_module(QQ, 12, -4); M <class 'sage.modular.hecke.module.HeckeModule_free_module_with_category'> .. TODO:: Implement a nicer repr, or implement the methods required by :class:`ModulesWithBasis` to benefit from :meth:`ModulesWithBasis.ParentMethods._repr_`.
r"""
[ "r" ]
def _repr_(self):
    r"""
    EXAMPLES::

        sage: M = sage.modular.hecke.module.HeckeModule_free_module(QQ, 12, -4); M
        <class 'sage.modular.hecke.module.HeckeModule_free_module_with_category'>

    .. TODO::

        Implement a nicer repr, or implement the methods required by
        :class:`ModulesWithBasis` to benefit from
        :meth:`ModulesWithBasis.ParentMethods._repr_`.
    """
    return repr(type(self))
[ "def", "_repr_", "(", "self", ")", ":", "return", "repr", "(", "type", "(", "self", ")", ")" ]
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/modular/hecke/module.py#L512-L526
mchristopher/PokemonGo-DesktopMap
ec37575f2776ee7d64456e2a1f6b6b78830b4fe0
app/pywin/Lib/random.py
python
Random.jumpahead
(self, n)
Change the internal state to one that is likely far away from the current state. This method will not be in Py3.x, so it is better to simply reseed.
Change the internal state to one that is likely far away from the current state. This method will not be in Py3.x, so it is better to simply reseed.
[ "Change", "the", "internal", "state", "to", "one", "that", "is", "likely", "far", "away", "from", "the", "current", "state", ".", "This", "method", "will", "not", "be", "in", "Py3", ".", "x", "so", "it", "is", "better", "to", "simply", "reseed", "." ]
def jumpahead(self, n):
    """Change the internal state to one that is likely far away
    from the current state.  This method will not be in Py3.x,
    so it is better to simply reseed.
    """
    # The super.jumpahead() method uses shuffling to change state,
    # so it needs a large and "interesting" n to work with.  Here,
    # we use hashing to create a large n for the shuffle.
    s = repr(n) + repr(self.getstate())
    n = int(_hashlib.new('sha512', s).hexdigest(), 16)
    super(Random, self).jumpahead(n)
[ "def", "jumpahead", "(", "self", ",", "n", ")", ":", "# The super.jumpahead() method uses shuffling to change state,", "# so it needs a large and \"interesting\" n to work with. Here,", "# we use hashing to create a large n for the shuffle.", "s", "=", "repr", "(", "n", ")", "+", "repr", "(", "self", ".", "getstate", "(", ")", ")", "n", "=", "int", "(", "_hashlib", ".", "new", "(", "'sha512'", ",", "s", ")", ".", "hexdigest", "(", ")", ",", "16", ")", "super", "(", "Random", ",", "self", ")", ".", "jumpahead", "(", "n", ")" ]
https://github.com/mchristopher/PokemonGo-DesktopMap/blob/ec37575f2776ee7d64456e2a1f6b6b78830b4fe0/app/pywin/Lib/random.py#L147-L157
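The trick in this jumpahead is hashing the requested n together with the full generator state, then reading the 512-bit digest as an integer so the superclass shuffle receives a "large and interesting" value. The derivation step in isolation (Python 3 here, where Random.jumpahead itself no longer exists; derived_jump is a hypothetical name):

import hashlib
import random

def derived_jump(state, n):
    # repr(n) + repr(state) -> sha512 -> 512-bit integer, as in the record above.
    s = repr(n) + repr(state)
    return int(hashlib.sha512(s.encode()).hexdigest(), 16)

big_n = derived_jump(random.Random(0).getstate(), 3)
assert big_n.bit_length() <= 512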
OpenEndedGroup/Field
4f7c8edfb01bb0ccc927b78d3c500f018a4ae37c
Contents/lib/python/mailbox.py
python
MH.unlock
(self)
Unlock the mailbox if it is locked.
Unlock the mailbox if it is locked.
[ "Unlock", "the", "mailbox", "if", "it", "is", "locked", "." ]
def unlock(self):
    """Unlock the mailbox if it is locked."""
    if self._locked:
        _unlock_file(self._file)
        _sync_close(self._file)
        self._file.close()
        del self._file
        self._locked = False
[ "def", "unlock", "(", "self", ")", ":", "if", "self", ".", "_locked", ":", "_unlock_file", "(", "self", ".", "_file", ")", "_sync_close", "(", "self", ".", "_file", ")", "self", ".", "_file", ".", "close", "(", ")", "del", "self", ".", "_file", "self", ".", "_locked", "=", "False" ]
https://github.com/OpenEndedGroup/Field/blob/4f7c8edfb01bb0ccc927b78d3c500f018a4ae37c/Contents/lib/python/mailbox.py#L953-L960
Udayraj123/OMRChecker
4f11cf4a30c9f6c1cf780f300837d50f7db1fda7
utils.py
python
adjust_gamma
(image, gamma=1.0)
return cv2.LUT(image, table)
[]
def adjust_gamma(image, gamma=1.0):
    # build a lookup table mapping the pixel values [0, 255] to
    # their adjusted gamma values
    invGamma = 1.0 / gamma
    table = np.array([((i / 255.0) ** invGamma) * 255
                      for i in np.arange(0, 256)]).astype("uint8")
    # apply gamma correction using the lookup table
    return cv2.LUT(image, table)
[ "def", "adjust_gamma", "(", "image", ",", "gamma", "=", "1.0", ")", ":", "# build a lookup table mapping the pixel values [0, 255] to", "# their adjusted gamma values", "invGamma", "=", "1.0", "/", "gamma", "table", "=", "np", ".", "array", "(", "[", "(", "(", "i", "/", "255.0", ")", "**", "invGamma", ")", "*", "255", "for", "i", "in", "np", ".", "arange", "(", "0", ",", "256", ")", "]", ")", ".", "astype", "(", "\"uint8\"", ")", "# apply gamma correction using the lookup table", "return", "cv2", ".", "LUT", "(", "image", ",", "table", ")" ]
https://github.com/Udayraj123/OMRChecker/blob/4f11cf4a30c9f6c1cf780f300837d50f7db1fda7/utils.py#L451-L459
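adjust_gamma precomputes a 256-entry lookup table, so the per-pixel work in cv2.LUT is a single table index (for a uint8 single-channel image, table[image] in plain NumPy gives the same result). A quick sanity check of the table's direction under this invGamma = 1/gamma convention:

import numpy as np

def gamma_lut(gamma=1.0):
    # Same table construction as adjust_gamma, without the OpenCV apply step.
    inv_gamma = 1.0 / gamma
    return np.array([((i / 255.0) ** inv_gamma) * 255
                     for i in range(256)]).astype("uint8")

assert gamma_lut(2.0)[128] > 128  # gamma > 1 brightens mid-tones
assert gamma_lut(0.5)[128] < 128  # gamma < 1 darkens them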
tomplus/kubernetes_asyncio
f028cc793e3a2c519be6a52a49fb77ff0b014c9b
kubernetes_asyncio/client/models/v1_aws_elastic_block_store_volume_source.py
python
V1AWSElasticBlockStoreVolumeSource.read_only
(self)
return self._read_only
Gets the read_only of this V1AWSElasticBlockStoreVolumeSource. # noqa: E501 Specify \"true\" to force and set the ReadOnly property in VolumeMounts to \"true\". If omitted, the default is \"false\". More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore # noqa: E501 :return: The read_only of this V1AWSElasticBlockStoreVolumeSource. # noqa: E501 :rtype: bool
Gets the read_only of this V1AWSElasticBlockStoreVolumeSource. # noqa: E501
[ "Gets", "the", "read_only", "of", "this", "V1AWSElasticBlockStoreVolumeSource", ".", "#", "noqa", ":", "E501" ]
def read_only(self):
    """Gets the read_only of this V1AWSElasticBlockStoreVolumeSource.  # noqa: E501

    Specify \"true\" to force and set the ReadOnly property in VolumeMounts to \"true\". If omitted, the default is \"false\". More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore  # noqa: E501

    :return: The read_only of this V1AWSElasticBlockStoreVolumeSource.  # noqa: E501
    :rtype: bool
    """
    return self._read_only
[ "def", "read_only", "(", "self", ")", ":", "return", "self", ".", "_read_only" ]
https://github.com/tomplus/kubernetes_asyncio/blob/f028cc793e3a2c519be6a52a49fb77ff0b014c9b/kubernetes_asyncio/client/models/v1_aws_elastic_block_store_volume_source.py#L116-L124