Dataset columns (one row per function below):
  text: string (lengths 89 to 104k), the function's source code including its docstring
  code_tokens: sequence, the tokenized source
  avg_line_len: float64 (range 7.91 to 980)
  score: float64 (range 0 to 630)
def append(self, child):
    """
    Append the given child :class:`Element <hl7apy.core.Element>`

    :param child: an instance of an :class:`Element <hl7apy.core.Element>` subclass
    """
    if self._can_add_child(child):
        if self.element == child.parent:
            self._remove_from_traversal_index(child)
            self.list.append(child)
            try:
                self.indexes[child.name].append(child)
            except KeyError:
                self.indexes[child.name] = [child]
        elif self.element == child.traversal_parent:
            try:
                self.traversal_indexes[child.name].append(child)
            except KeyError:
                self.traversal_indexes[child.name] = [child]
[ "def", "append", "(", "self", ",", "child", ")", ":", "if", "self", ".", "_can_add_child", "(", "child", ")", ":", "if", "self", ".", "element", "==", "child", ".", "parent", ":", "self", ".", "_remove_from_traversal_index", "(", "child", ")", "self", ".", "list", ".", "append", "(", "child", ")", "try", ":", "self", ".", "indexes", "[", "child", ".", "name", "]", ".", "append", "(", "child", ")", "except", "KeyError", ":", "self", ".", "indexes", "[", "child", ".", "name", "]", "=", "[", "child", "]", "elif", "self", ".", "element", "==", "child", ".", "traversal_parent", ":", "try", ":", "self", ".", "traversal_indexes", "[", "child", ".", "name", "]", ".", "append", "(", "child", ")", "except", "KeyError", ":", "self", ".", "traversal_indexes", "[", "child", ".", "name", "]", "=", "[", "child", "]" ]
avg_line_len: 39.5 | score: 14.9
def get_membership_document(membership_type: str, current_block: dict, identity: Identity,
                            salt: str, password: str) -> Membership:
    """
    Get a Membership document

    :param membership_type: "IN" to ask for membership or "OUT" to cancel membership
    :param current_block: Current block data
    :param identity: Identity document
    :param salt: Passphrase of the account
    :param password: Password of the account

    :rtype: Membership
    """
    # get current block BlockStamp
    timestamp = BlockUID(current_block['number'], current_block['hash'])

    # create keys from credentials
    key = SigningKey.from_credentials(salt, password)

    # create membership document
    membership = Membership(
        version=10,
        currency=current_block['currency'],
        issuer=key.pubkey,
        membership_ts=timestamp,
        membership_type=membership_type,
        uid=identity.uid,
        identity_ts=identity.timestamp,
        signature=None
    )

    # sign document
    membership.sign([key])

    return membership
[ "def", "get_membership_document", "(", "membership_type", ":", "str", ",", "current_block", ":", "dict", ",", "identity", ":", "Identity", ",", "salt", ":", "str", ",", "password", ":", "str", ")", "->", "Membership", ":", "# get current block BlockStamp", "timestamp", "=", "BlockUID", "(", "current_block", "[", "'number'", "]", ",", "current_block", "[", "'hash'", "]", ")", "# create keys from credentials", "key", "=", "SigningKey", ".", "from_credentials", "(", "salt", ",", "password", ")", "# create identity document", "membership", "=", "Membership", "(", "version", "=", "10", ",", "currency", "=", "current_block", "[", "'currency'", "]", ",", "issuer", "=", "key", ".", "pubkey", ",", "membership_ts", "=", "timestamp", ",", "membership_type", "=", "membership_type", ",", "uid", "=", "identity", ".", "uid", ",", "identity_ts", "=", "identity", ".", "timestamp", ",", "signature", "=", "None", ")", "# sign document", "membership", ".", "sign", "(", "[", "key", "]", ")", "return", "membership" ]
avg_line_len: 28.972222 | score: 19.194444
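A minimal call sketch for get_membership_document above (the block data and identity are illustrative; in duniterpy they would come from a node request and a previously published Identity document):

    # Hypothetical inputs: a block header fetched from a node and an existing identity.
    current_block = {'number': 367342, 'hash': '000024399D...', 'currency': 'g1'}
    membership = get_membership_document('IN', current_block, identity,
                                         salt='my passphrase', password='my password')
    # The returned document is already signed and ready to be submitted to a node.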
def getDetail(self, row, detail_field):
    """Gets the value of the detail *detail_field* of the parameter at index *row* from
    its selected components' `auto_details`. All of the selected components' values for
    *detail_field* must match

    :param row: the ith parameter number
    :type row: int
    :param detail_field: auto_details member key
    :type detail_field: str
    :returns: value type appropriate for parameter
    """
    param = self._parameters[row]
    param_type = param['parameter']
    components = param['selection']
    if len(components) == 0 or param_type == '':
        return None
    # all components must match
    matching_details = []
    # for comp in components:
    for comp in components:
        alldetails = comp.auto_details()
        if not param_type in alldetails:
            # self.hintRequested.emit('INCOMPATABLE COMPONENTS FOR PARAMETER TYPE {}'.format(param_type))
            return None
        details = alldetails[param_type]
        matching_details.append(details[detail_field])
    matching_details = set(matching_details)
    if len(matching_details) > 1:
        print 'Components with mis-matched units!'
        return None
    return matching_details.pop()
[ "def", "getDetail", "(", "self", ",", "row", ",", "detail_field", ")", ":", "param", "=", "self", ".", "_parameters", "[", "row", "]", "param_type", "=", "param", "[", "'parameter'", "]", "components", "=", "param", "[", "'selection'", "]", "if", "len", "(", "components", ")", "==", "0", "or", "param_type", "==", "''", ":", "return", "None", "# all components must match", "matching_details", "=", "[", "]", "# for comp in components:", "for", "comp", "in", "components", ":", "alldetails", "=", "comp", ".", "auto_details", "(", ")", "if", "not", "param_type", "in", "alldetails", ":", "# self.hintRequested.emit('INCOMPATABLE COMPONENTS FOR PARAMETER TYPE {}'.format(param_type))", "return", "None", "details", "=", "alldetails", "[", "param_type", "]", "matching_details", ".", "append", "(", "details", "[", "detail_field", "]", ")", "matching_details", "=", "set", "(", "matching_details", ")", "if", "len", "(", "matching_details", ")", ">", "1", ":", "print", "'Components with mis-matched units!'", "return", "None", "return", "matching_details", ".", "pop", "(", ")" ]
avg_line_len: 40.9375 | score: 12.59375
def expose_attribute(self, local_name, attribute_type, remote_name=None, display_name=None,
                     is_required=False, is_readonly=False, max_length=None, min_length=None,
                     is_identifier=False, choices=None, is_unique=False, is_email=False,
                     is_login=False, is_editable=True, is_password=False, can_order=False,
                     can_search=False, subtype=None, min_value=None, max_value=None):
    """ Expose local_name as remote_name

        An exposed attribute `local_name` will be sent within the HTTP request as
        a `remote_name`
    """
    if remote_name is None:
        remote_name = local_name

    if display_name is None:
        display_name = local_name

    attribute = NURemoteAttribute(local_name=local_name, remote_name=remote_name, attribute_type=attribute_type)
    attribute.display_name = display_name
    attribute.is_required = is_required
    attribute.is_readonly = is_readonly
    attribute.min_length = min_length
    attribute.max_length = max_length
    attribute.is_editable = is_editable
    attribute.is_identifier = is_identifier
    attribute.choices = choices
    attribute.is_unique = is_unique
    attribute.is_email = is_email
    attribute.is_login = is_login
    attribute.is_password = is_password
    attribute.can_order = can_order
    attribute.can_search = can_search
    attribute.subtype = subtype
    attribute.min_value = min_value
    attribute.max_value = max_value

    self._attributes[local_name] = attribute
[ "def", "expose_attribute", "(", "self", ",", "local_name", ",", "attribute_type", ",", "remote_name", "=", "None", ",", "display_name", "=", "None", ",", "is_required", "=", "False", ",", "is_readonly", "=", "False", ",", "max_length", "=", "None", ",", "min_length", "=", "None", ",", "is_identifier", "=", "False", ",", "choices", "=", "None", ",", "is_unique", "=", "False", ",", "is_email", "=", "False", ",", "is_login", "=", "False", ",", "is_editable", "=", "True", ",", "is_password", "=", "False", ",", "can_order", "=", "False", ",", "can_search", "=", "False", ",", "subtype", "=", "None", ",", "min_value", "=", "None", ",", "max_value", "=", "None", ")", ":", "if", "remote_name", "is", "None", ":", "remote_name", "=", "local_name", "if", "display_name", "is", "None", ":", "display_name", "=", "local_name", "attribute", "=", "NURemoteAttribute", "(", "local_name", "=", "local_name", ",", "remote_name", "=", "remote_name", ",", "attribute_type", "=", "attribute_type", ")", "attribute", ".", "display_name", "=", "display_name", "attribute", ".", "is_required", "=", "is_required", "attribute", ".", "is_readonly", "=", "is_readonly", "attribute", ".", "min_length", "=", "min_length", "attribute", ".", "max_length", "=", "max_length", "attribute", ".", "is_editable", "=", "is_editable", "attribute", ".", "is_identifier", "=", "is_identifier", "attribute", ".", "choices", "=", "choices", "attribute", ".", "is_unique", "=", "is_unique", "attribute", ".", "is_email", "=", "is_email", "attribute", ".", "is_login", "=", "is_login", "attribute", ".", "is_password", "=", "is_password", "attribute", ".", "can_order", "=", "can_order", "attribute", ".", "can_search", "=", "can_search", "attribute", ".", "subtype", "=", "subtype", "attribute", ".", "min_value", "=", "min_value", "attribute", ".", "max_value", "=", "max_value", "self", ".", "_attributes", "[", "local_name", "]", "=", "attribute" ]
avg_line_len: 46.060606 | score: 22.393939
def _eq(self, T, P):
    """Procedure to calculate the composition in the saturation state

    Parameters
    ----------
    T : float
        Temperature [K]
    P : float
        Pressure [MPa]

    Returns
    -------
    Asat : float
        Saturation mass fraction of dry air in humid air [kg/kg]
    """
    if T <= 273.16:
        ice = _Ice(T, P)
        gw = ice["g"]
    else:
        water = IAPWS95(T=T, P=P)
        gw = water.g

    def f(parr):
        rho, a = parr
        if a > 1:
            a = 1
        fa = self._fav(T, rho, a)
        muw = fa["fir"]+rho*fa["fird"]-a*fa["fira"]
        return gw-muw, rho**2*fa["fird"]/1000-P

    rinput = fsolve(f, [1, 0.95], full_output=True)
    Asat = rinput[0][1]
    return Asat
[ "def", "_eq", "(", "self", ",", "T", ",", "P", ")", ":", "if", "T", "<=", "273.16", ":", "ice", "=", "_Ice", "(", "T", ",", "P", ")", "gw", "=", "ice", "[", "\"g\"", "]", "else", ":", "water", "=", "IAPWS95", "(", "T", "=", "T", ",", "P", "=", "P", ")", "gw", "=", "water", ".", "g", "def", "f", "(", "parr", ")", ":", "rho", ",", "a", "=", "parr", "if", "a", ">", "1", ":", "a", "=", "1", "fa", "=", "self", ".", "_fav", "(", "T", ",", "rho", ",", "a", ")", "muw", "=", "fa", "[", "\"fir\"", "]", "+", "rho", "*", "fa", "[", "\"fird\"", "]", "-", "a", "*", "fa", "[", "\"fira\"", "]", "return", "gw", "-", "muw", ",", "rho", "**", "2", "*", "fa", "[", "\"fird\"", "]", "/", "1000", "-", "P", "rinput", "=", "fsolve", "(", "f", ",", "[", "1", ",", "0.95", "]", ",", "full_output", "=", "True", ")", "Asat", "=", "rinput", "[", "0", "]", "[", "1", "]", "return", "Asat" ]
avg_line_len: 24.939394 | score: 19.272727
def restore_app_connection(self, port=None):
    """Restores the sl4a connection after the device got disconnected.

    Instead of creating a new instance of the client:
      - Uses the given port (or finds a new available host_port if none is
        given).
      - Tries to connect to the remote server with the selected port.

    Args:
        port: If given, this is the host port from which to connect to remote
            device port. If not provided, find a new available port as host
            port.

    Raises:
        AppRestoreConnectionError: When the app was not able to be started.
    """
    self.host_port = port or utils.get_available_host_port()
    self._retry_connect()
    self.ed = self._start_event_client()
[ "def", "restore_app_connection", "(", "self", ",", "port", "=", "None", ")", ":", "self", ".", "host_port", "=", "port", "or", "utils", ".", "get_available_host_port", "(", ")", "self", ".", "_retry_connect", "(", ")", "self", ".", "ed", "=", "self", ".", "_start_event_client", "(", ")" ]
avg_line_len: 39.421053 | score: 23.631579
def exit(exit_code=0):
    """
    Exits the Application.

    :param exit_code: Exit code.
    :type exit_code: int
    """
    for line in SESSION_FOOTER_TEXT:
        LOGGER.info(line)

    foundations.verbose.remove_logging_handler(RuntimeGlobals.logging_console_handler)

    RuntimeGlobals.application.exit(exit_code)
[ "def", "exit", "(", "exit_code", "=", "0", ")", ":", "for", "line", "in", "SESSION_FOOTER_TEXT", ":", "LOGGER", ".", "info", "(", "line", ")", "foundations", ".", "verbose", ".", "remove_logging_handler", "(", "RuntimeGlobals", ".", "logging_console_handler", ")", "RuntimeGlobals", ".", "application", ".", "exit", "(", "exit_code", ")" ]
avg_line_len: 22.214286 | score: 20.5
def finish_async_rpc(self, address, rpc_id, response):
    """Finish a previous asynchronous RPC.

    This method should be called by a peripheral tile that previously had
    an RPC called on it and chose to respond asynchronously by raising
    ``AsynchronousRPCResponse`` in the RPC handler itself.

    The response passed to this function will be returned to the caller
    as if the RPC had returned it immediately.

    This method must only ever be called from a coroutine inside the
    emulation loop that is handling background work on behalf of a tile.

    Args:
        address (int): The tile address the RPC was called on.
        rpc_id (int): The ID of the RPC that was called.
        response (bytes): The bytes that should be returned to the caller
            of the RPC.
    """
    pending = self._pending_rpcs.get(address)
    if pending is None:
        raise ArgumentError("No asynchronously RPC currently in progress on tile %d" % address)

    responder = pending.get(rpc_id)
    if responder is None:
        raise ArgumentError("RPC %04X is not running asynchronous on tile %d" % (rpc_id, address))

    del pending[rpc_id]

    responder.set_result(response)
    self._rpc_queue.task_done()
[ "def", "finish_async_rpc", "(", "self", ",", "address", ",", "rpc_id", ",", "response", ")", ":", "pending", "=", "self", ".", "_pending_rpcs", ".", "get", "(", "address", ")", "if", "pending", "is", "None", ":", "raise", "ArgumentError", "(", "\"No asynchronously RPC currently in progress on tile %d\"", "%", "address", ")", "responder", "=", "pending", ".", "get", "(", "rpc_id", ")", "if", "responder", "is", "None", ":", "raise", "ArgumentError", "(", "\"RPC %04X is not running asynchronous on tile %d\"", "%", "(", "rpc_id", ",", "address", ")", ")", "del", "pending", "[", "rpc_id", "]", "responder", ".", "set_result", "(", "response", ")", "self", ".", "_rpc_queue", ".", "task_done", "(", ")" ]
avg_line_len: 38.969697 | score: 25.969697
def hashtags(self, quantity: int = 4) -> Union[str, list]:
    """Generate a list of hashtags.

    :param quantity: The quantity of hashtags.
    :return: The list of hashtags.
    :raises NonEnumerableError: if category is not in Hashtag.

    :Example:
        ['#love', '#sky', '#nice']
    """
    tags = ['#' + self.random.choice(HASHTAGS) for _ in range(quantity)]

    if int(quantity) == 1:
        return tags[0]

    return tags
[ "def", "hashtags", "(", "self", ",", "quantity", ":", "int", "=", "4", ")", "->", "Union", "[", "str", ",", "list", "]", ":", "tags", "=", "[", "'#'", "+", "self", ".", "random", ".", "choice", "(", "HASHTAGS", ")", "for", "_", "in", "range", "(", "quantity", ")", "]", "if", "int", "(", "quantity", ")", "==", "1", ":", "return", "tags", "[", "0", "]", "return", "tags" ]
avg_line_len: 28.411765 | score: 17.470588
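A quick usage sketch for hashtags above (the provider class name Internet is an assumption about where this method lives):

    internet = Internet()                 # hypothetical provider exposing hashtags()
    print(internet.hashtags(quantity=3))  # e.g. ['#love', '#sky', '#nice']
    print(internet.hashtags(quantity=1))  # a single string such as '#nice'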
def authenticate(self, username=None, password=None, **kwargs):
    """The authenticate method takes credentials as keyword arguments,
    usually username/email and password.

    Returns a user model if the Stormpath authentication was successful or
    None otherwise. It expects three variables to be defined in Django
    settings:

        STORMPATH_ID = "apiKeyId"
        STORMPATH_SECRET = "apiKeySecret"
        STORMPATH_APPLICATION = "https://api.stormpath.com/v1/applications/APP_UID"
    """
    if username is None:
        UserModel = get_user_model()
        username = kwargs.get(UserModel.USERNAME_FIELD)

    account = self._stormpath_authenticate(username, password)
    if account is None:
        return None

    return self._create_or_get_user(account)
[ "def", "authenticate", "(", "self", ",", "username", "=", "None", ",", "password", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "username", "is", "None", ":", "UserModel", "=", "get_user_model", "(", ")", "username", "=", "kwargs", ".", "get", "(", "UserModel", ".", "USERNAME_FIELD", ")", "account", "=", "self", ".", "_stormpath_authenticate", "(", "username", ",", "password", ")", "if", "account", "is", "None", ":", "return", "None", "return", "self", ".", "_create_or_get_user", "(", "account", ")" ]
avg_line_len: 40 | score: 17.571429
def Decrypt(self, data):
    """A convenience method which pads and decrypts at once."""
    decryptor = self.GetDecryptor()

    try:
        padded_data = decryptor.update(data) + decryptor.finalize()
        return self.UnPad(padded_data)
    except ValueError as e:
        raise CipherError(e)
[ "def", "Decrypt", "(", "self", ",", "data", ")", ":", "decryptor", "=", "self", ".", "GetDecryptor", "(", ")", "try", ":", "padded_data", "=", "decryptor", ".", "update", "(", "data", ")", "+", "decryptor", ".", "finalize", "(", ")", "return", "self", ".", "UnPad", "(", "padded_data", ")", "except", "ValueError", "as", "e", ":", "raise", "CipherError", "(", "e", ")" ]
avg_line_len: 31.555556 | score: 16.555556
def cos_r(self, N=None):  # percent=0.9
    """Return the squared cosines for each row."""
    if not hasattr(self, 'F') or self.F.shape[1] < self.rank:
        self.fs_r(N=self.rank)  # generate F
    self.dr = norm(self.F, axis=1)**2
    # cheaper than diag(self.F.dot(self.F.T))?

    return apply_along_axis(lambda _: _/self.dr, 0, self.F[:, :N]**2)
[ "def", "cos_r", "(", "self", ",", "N", "=", "None", ")", ":", "# percent=0.9", "if", "not", "hasattr", "(", "self", ",", "'F'", ")", "or", "self", ".", "F", ".", "shape", "[", "1", "]", "<", "self", ".", "rank", ":", "self", ".", "fs_r", "(", "N", "=", "self", ".", "rank", ")", "# generate F", "self", ".", "dr", "=", "norm", "(", "self", ".", "F", ",", "axis", "=", "1", ")", "**", "2", "# cheaper than diag(self.F.dot(self.F.T))?", "return", "apply_along_axis", "(", "lambda", "_", ":", "_", "/", "self", ".", "dr", ",", "0", ",", "self", ".", "F", "[", ":", ",", ":", "N", "]", "**", "2", ")" ]
avg_line_len: 36.777778 | score: 15.222222
def list_endpoints(self):
    """Lists the known object storage endpoints."""
    _filter = {
        'hubNetworkStorage': {'vendorName': {'operation': 'Swift'}},
    }
    endpoints = []
    network_storage = self.client.call('Account', 'getHubNetworkStorage',
                                       mask=ENDPOINT_MASK,
                                       limit=1,
                                       filter=_filter)
    if network_storage:
        for node in network_storage['storageNodes']:
            endpoints.append({
                'datacenter': node['datacenter'],
                'public': node['frontendIpAddress'],
                'private': node['backendIpAddress'],
            })
    return endpoints
[ "def", "list_endpoints", "(", "self", ")", ":", "_filter", "=", "{", "'hubNetworkStorage'", ":", "{", "'vendorName'", ":", "{", "'operation'", ":", "'Swift'", "}", "}", ",", "}", "endpoints", "=", "[", "]", "network_storage", "=", "self", ".", "client", ".", "call", "(", "'Account'", ",", "'getHubNetworkStorage'", ",", "mask", "=", "ENDPOINT_MASK", ",", "limit", "=", "1", ",", "filter", "=", "_filter", ")", "if", "network_storage", ":", "for", "node", "in", "network_storage", "[", "'storageNodes'", "]", ":", "endpoints", ".", "append", "(", "{", "'datacenter'", ":", "node", "[", "'datacenter'", "]", ",", "'public'", ":", "node", "[", "'frontendIpAddress'", "]", ",", "'private'", ":", "node", "[", "'backendIpAddress'", "]", ",", "}", ")", "return", "endpoints" ]
avg_line_len: 40.8 | score: 18.25
def fetch_git_sha(path, head=None):
    """
    >>> fetch_git_sha(os.path.dirname(__file__))
    """
    if not head:
        head_path = os.path.join(path, '.git', 'HEAD')
        if not os.path.exists(head_path):
            raise InvalidGitRepository(
                'Cannot identify HEAD for git repository at %s' % (path,))

        with open(head_path, 'r') as fp:
            head = text_type(fp.read()).strip()

        if head.startswith('ref: '):
            head = head[5:]
            revision_file = os.path.join(
                path, '.git', *head.split('/')
            )
        else:
            return head
    else:
        revision_file = os.path.join(path, '.git', 'refs', 'heads', head)

    if not os.path.exists(revision_file):
        if not os.path.exists(os.path.join(path, '.git')):
            raise InvalidGitRepository(
                '%s does not seem to be the root of a git repository' % (path,))

        # Check for our .git/packed-refs' file since a `git gc` may have run
        # https://git-scm.com/book/en/v2/Git-Internals-Maintenance-and-Data-Recovery
        packed_file = os.path.join(path, '.git', 'packed-refs')
        if os.path.exists(packed_file):
            with open(packed_file) as fh:
                for line in fh:
                    line = line.rstrip()
                    if line and line[:1] not in ('#', '^'):
                        try:
                            revision, ref = line.split(' ', 1)
                        except ValueError:
                            continue
                        if ref == head:
                            return text_type(revision)

        raise InvalidGitRepository(
            'Unable to find ref to head "%s" in repository' % (head,))

    with open(revision_file) as fh:
        return text_type(fh.read()).strip()
[ "def", "fetch_git_sha", "(", "path", ",", "head", "=", "None", ")", ":", "if", "not", "head", ":", "head_path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "'.git'", ",", "'HEAD'", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "head_path", ")", ":", "raise", "InvalidGitRepository", "(", "'Cannot identify HEAD for git repository at %s'", "%", "(", "path", ",", ")", ")", "with", "open", "(", "head_path", ",", "'r'", ")", "as", "fp", ":", "head", "=", "text_type", "(", "fp", ".", "read", "(", ")", ")", ".", "strip", "(", ")", "if", "head", ".", "startswith", "(", "'ref: '", ")", ":", "head", "=", "head", "[", "5", ":", "]", "revision_file", "=", "os", ".", "path", ".", "join", "(", "path", ",", "'.git'", ",", "*", "head", ".", "split", "(", "'/'", ")", ")", "else", ":", "return", "head", "else", ":", "revision_file", "=", "os", ".", "path", ".", "join", "(", "path", ",", "'.git'", ",", "'refs'", ",", "'heads'", ",", "head", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "revision_file", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "path", ",", "'.git'", ")", ")", ":", "raise", "InvalidGitRepository", "(", "'%s does not seem to be the root of a git repository'", "%", "(", "path", ",", ")", ")", "# Check for our .git/packed-refs' file since a `git gc` may have run", "# https://git-scm.com/book/en/v2/Git-Internals-Maintenance-and-Data-Recovery", "packed_file", "=", "os", ".", "path", ".", "join", "(", "path", ",", "'.git'", ",", "'packed-refs'", ")", "if", "os", ".", "path", ".", "exists", "(", "packed_file", ")", ":", "with", "open", "(", "packed_file", ")", "as", "fh", ":", "for", "line", "in", "fh", ":", "line", "=", "line", ".", "rstrip", "(", ")", "if", "line", "and", "line", "[", ":", "1", "]", "not", "in", "(", "'#'", ",", "'^'", ")", ":", "try", ":", "revision", ",", "ref", "=", "line", ".", "split", "(", "' '", ",", "1", ")", "except", "ValueError", ":", "continue", "if", "ref", "==", "head", ":", "return", "text_type", "(", "revision", ")", "raise", "InvalidGitRepository", "(", "'Unable to find ref to head \"%s\" in repository'", "%", "(", "head", ",", ")", ")", "with", "open", "(", "revision_file", ")", "as", "fh", ":", "return", "text_type", "(", "fh", ".", "read", "(", ")", ")", ".", "strip", "(", ")" ]
avg_line_len: 37.166667 | score: 16.333333
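A call sketch for fetch_git_sha above (the repository path is illustrative):

    try:
        sha = fetch_git_sha('/path/to/repo')                      # resolve HEAD
        main_sha = fetch_git_sha('/path/to/repo', head='master')  # resolve a named branch
    except InvalidGitRepository as exc:
        print('not a git checkout: %s' % exc)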
def prepare_roomMap(roomMap):
    """
    Prepares the roomMap to be JSONified. That is: converts the
    non-JSON-serializable objects such as set()
    """
    ret = {}
    for room in roomMap:
        ret[room] = [roomMap[room].name, list(roomMap[room].pcs)]
    return ret
[ "def", "prepare_roomMap", "(", "roomMap", ")", ":", "ret", "=", "{", "}", "for", "room", "in", "roomMap", ":", "ret", "[", "room", "]", "=", "[", "roomMap", "[", "room", "]", ".", "name", ",", "list", "(", "roomMap", "[", "room", "]", ".", "pcs", ")", "]", "return", "ret" ]
avg_line_len: 37.857143 | score: 15.142857
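Why prepare_roomMap exists: set objects are not JSON serializable, so the pcs sets are converted to lists first. A minimal sketch with a hypothetical stand-in for the real room objects:

    import json
    from types import SimpleNamespace

    # Hypothetical stand-in for the real room objects.
    rooms = {'lab1': SimpleNamespace(name='Lab 1', pcs={'pc01', 'pc02'})}
    print(json.dumps(prepare_roomMap(rooms)))  # sets become lists, so this no longer raises TypeError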
def columns(self, model=None):
    """
    Returns a generator that loops through the columns that are associated with this query.

    :return     <generator>(orb.Column)
    """
    column = self.column(model=model)
    if column:
        yield column

    check = self.__value
    if not isinstance(check, (list, set, tuple)):
        check = (check,)

    for val in check:
        if isinstance(val, (Query, QueryCompound)):
            for col in val.columns(model):
                yield col
[ "def", "columns", "(", "self", ",", "model", "=", "None", ")", ":", "column", "=", "self", ".", "column", "(", "model", "=", "model", ")", "if", "column", ":", "yield", "column", "check", "=", "self", ".", "__value", "if", "not", "isinstance", "(", "check", ",", "(", "list", ",", "set", ",", "tuple", ")", ")", ":", "check", "=", "(", "check", ",", ")", "for", "val", "in", "check", ":", "if", "isinstance", "(", "val", ",", "(", "Query", ",", "QueryCompound", ")", ")", ":", "for", "col", "in", "val", ".", "columns", "(", "model", ")", ":", "yield", "col" ]
avg_line_len: 30.277778 | score: 16.833333
def validate(path, format='json', approved_applications=None,
             determined=True, listed=True, expectation=PACKAGE_ANY,
             for_appversions=None, overrides=None, timeout=-1,
             compat_test=False, **kw):
    """Perform validation in one easy step!

    `path`:
        *Required* A file system path to the package to be validated.
    `format`:
        The format to return the results in. Defaults to "json". Currently,
        any other format will simply return the error bundle.
    `approved_applications`:
        Path to the list of approved application versions
    `determined`:
        If set to `False`, validation will halt at the end of the first tier
        that raises errors.
    `listed`:
        Whether the app is headed for the app marketplace or AMO. Defaults
        to `True`.
    `expectation`:
        The type of package that should be expected. Must be a symbolic
        constant from validator.constants (i.e.:
        validator.constants.PACKAGE_*). Defaults to PACKAGE_ANY.
    `for_appversions`:
        A dict of app GUIDs referencing lists of versions. Determines which
        version-dependent tests should be run.
    `timeout`:
        Number of seconds before aborting addon validation, or -1 to run
        with no timeout.
    `compat_test`:
        A flag to signal the validator to skip tests which should not be run
        during compatibility bumps. Defaults to `False`.
    """
    bundle = ErrorBundle(listed=listed, determined=determined,
                         overrides=overrides, for_appversions=for_appversions)
    bundle.save_resource('is_compat_test', compat_test)

    if approved_applications is None:
        approved_applications = os.path.join(os.path.dirname(__file__),
                                             'app_versions.json')

    if isinstance(approved_applications, types.StringTypes):
        # Load up the target applications if the approved applications is a
        # path (string).
        with open(approved_applications) as approved_apps:
            apps = json.load(approved_apps)
    elif isinstance(approved_applications, dict):
        # If the lists of approved applications are already in a dict, just use
        # that instead of trying to pull from a file.
        apps = approved_applications
    else:
        raise ValueError('Unknown format for `approved_applications`.')

    constants.APPROVED_APPLICATIONS.clear()
    constants.APPROVED_APPLICATIONS.update(apps)

    submain.prepare_package(bundle, path, expectation,
                            for_appversions=for_appversions,
                            timeout=timeout)

    return format_result(bundle, format)
[ "def", "validate", "(", "path", ",", "format", "=", "'json'", ",", "approved_applications", "=", "None", ",", "determined", "=", "True", ",", "listed", "=", "True", ",", "expectation", "=", "PACKAGE_ANY", ",", "for_appversions", "=", "None", ",", "overrides", "=", "None", ",", "timeout", "=", "-", "1", ",", "compat_test", "=", "False", ",", "*", "*", "kw", ")", ":", "bundle", "=", "ErrorBundle", "(", "listed", "=", "listed", ",", "determined", "=", "determined", ",", "overrides", "=", "overrides", ",", "for_appversions", "=", "for_appversions", ")", "bundle", ".", "save_resource", "(", "'is_compat_test'", ",", "compat_test", ")", "if", "approved_applications", "is", "None", ":", "approved_applications", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "'app_versions.json'", ")", "if", "isinstance", "(", "approved_applications", ",", "types", ".", "StringTypes", ")", ":", "# Load up the target applications if the approved applications is a", "# path (string).", "with", "open", "(", "approved_applications", ")", "as", "approved_apps", ":", "apps", "=", "json", ".", "load", "(", "approved_apps", ")", "elif", "isinstance", "(", "approved_applications", ",", "dict", ")", ":", "# If the lists of approved applications are already in a dict, just use", "# that instead of trying to pull from a file.", "apps", "=", "approved_applications", "else", ":", "raise", "ValueError", "(", "'Unknown format for `approved_applications`.'", ")", "constants", ".", "APPROVED_APPLICATIONS", ".", "clear", "(", ")", "constants", ".", "APPROVED_APPLICATIONS", ".", "update", "(", "apps", ")", "submain", ".", "prepare_package", "(", "bundle", ",", "path", ",", "expectation", ",", "for_appversions", "=", "for_appversions", ",", "timeout", "=", "timeout", ")", "return", "format_result", "(", "bundle", ",", "format", ")" ]
avg_line_len: 38.842857 | score: 20.1
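A minimal call sketch for validate above (the package path is illustrative):

    # Returns a JSON report; determined=False stops at the first tier that errors.
    report = validate('my-addon.xpi', format='json', determined=False)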
def nest_values(self, levels, level=0, metric=None, dims=()):
    """
    Nest values at each level on the back-end with
    access and setting, instead of summing from the bottom.
    """
    if not level:
        return [{
            'name': m,
            'val': levels[0][m],
            'children': self.nest_values(levels, 1, m),
        } for m in levels[0].index]
    if level == 1:
        return [{
            'name': i,
            'val': levels[1][metric][i],
            'children': self.nest_values(levels, 2, metric, (i,)),
        } for i in levels[1][metric].index]
    if level >= len(levels):
        return []
    return [{
        'name': i,
        'val': levels[level][metric][dims][i],
        'children': self.nest_values(
            levels, level + 1, metric, dims + (i,),
        ),
    } for i in levels[level][metric][dims].index]
[ "def", "nest_values", "(", "self", ",", "levels", ",", "level", "=", "0", ",", "metric", "=", "None", ",", "dims", "=", "(", ")", ")", ":", "if", "not", "level", ":", "return", "[", "{", "'name'", ":", "m", ",", "'val'", ":", "levels", "[", "0", "]", "[", "m", "]", ",", "'children'", ":", "self", ".", "nest_values", "(", "levels", ",", "1", ",", "m", ")", ",", "}", "for", "m", "in", "levels", "[", "0", "]", ".", "index", "]", "if", "level", "==", "1", ":", "return", "[", "{", "'name'", ":", "i", ",", "'val'", ":", "levels", "[", "1", "]", "[", "metric", "]", "[", "i", "]", ",", "'children'", ":", "self", ".", "nest_values", "(", "levels", ",", "2", ",", "metric", ",", "(", "i", ",", ")", ")", ",", "}", "for", "i", "in", "levels", "[", "1", "]", "[", "metric", "]", ".", "index", "]", "if", "level", ">=", "len", "(", "levels", ")", ":", "return", "[", "]", "return", "[", "{", "'name'", ":", "i", ",", "'val'", ":", "levels", "[", "level", "]", "[", "metric", "]", "[", "dims", "]", "[", "i", "]", ",", "'children'", ":", "self", ".", "nest_values", "(", "levels", ",", "level", "+", "1", ",", "metric", ",", "dims", "+", "(", "i", ",", ")", ",", ")", ",", "}", "for", "i", "in", "levels", "[", "level", "]", "[", "metric", "]", "[", "dims", "]", ".", "index", "]" ]
avg_line_len: 36.038462 | score: 13.807692
def log_scalar(self, name, value, step=None):
    """
    Add a new measurement.

    The measurement will be processed by the MongoDB observer
    during a heartbeat event. Other observers are not yet supported.

    :param name: The name of the metric, e.g. training.loss
    :param value: The measured value
    :param step: The step number (integer), e.g. the iteration number.
                 If not specified, an internal counter for each metric
                 is used, incremented by one.
    """
    # Method added in change https://github.com/chovanecm/sacred/issues/4
    # The same as Run.log_scalar
    return self.current_run.log_scalar(name, value, step)
[ "def", "log_scalar", "(", "self", ",", "name", ",", "value", ",", "step", "=", "None", ")", ":", "# Method added in change https://github.com/chovanecm/sacred/issues/4", "# The same as Run.log_scalar", "return", "self", ".", "current_run", ".", "log_scalar", "(", "name", ",", "value", ",", "step", ")" ]
avg_line_len: 41.941176 | score: 17.235294
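A usage sketch in Sacred's idiom (assuming this method sits on a Sacred Experiment, as the delegation to current_run suggests; the training loop is illustrative):

    from sacred import Experiment

    ex = Experiment('demo')

    @ex.main
    def train():
        for step in range(100):
            loss = 1.0 / (step + 1)                 # stand-in for a real training loss
            ex.log_scalar('training.loss', loss, step)

    ex.run()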
def lstsq(a, b, rcond=None, weighted=False, extrainfo=False):
    """ Least-squares solution ``x`` to ``a @ x = b`` for |GVar|\s.

    Here ``x`` is defined to be the solution that minimizes
    ``||b - a @ x||``. If ``b`` has a covariance matrix, another option
    is to weight the norm with the inverse covariance matrix: i.e.,
    minimize ``|| isig @ b - isig @ a @ x||`` where ``isig`` is the
    square root of the inverse of ``b``'s covariance matrix. Set
    parameter ``weighted=True`` to obtain the weighted-least-squares
    solution.

    Args:
        a : Matrix/array of shape ``(M,N)`` containing numbers and/or |GVar|\s.
        b : Vector/array of shape ``(M,)`` containing numbers and/or |GVar|\s.
        rcond (float): Cutoff for singular values of ``a``. Singular values
            smaller than ``rcond`` times the maximum eigenvalue are ignored.
            Default (``rcond=None``) is ``max(M,N)`` times machine precision.
        weighted (bool): If ``True``, use weighted least squares; otherwise
            use unweighted least squares.
        extrainfo (bool): If ``False`` (default) only ``x`` is returned;
            otherwise ``(x, residual, rank, s)`` is returned.

    Returns:
        Array ``x`` of shape ``(N,)`` that minimizes ``|| b - a @ x||`` if
        ``extrainfo==False`` (default); otherwise returns a tuple
        ``(x, residual, rank, s)`` where ``residual`` is the sum of the
        squares of ``b - a @ x``, ``rank`` is the rank of matrix ``a``,
        and ``s`` is an array containing the singular values.
    """
    a = numpy.asarray(a)
    b = numpy.asarray(b)
    if a.ndim != 2:
        raise ValueError(
            'a must have dimension 2: actual shape = ' + str(a.shape)
        )
    if a.shape[0] != b.shape[0]:
        raise ValueError(
            'a and b shapes mismatched: {} vs {}'.format(a.shape, b.shape)
        )
    if rcond is None:
        rcond = numpy.finfo(float).eps * max(a.shape)
    if weighted:
        try:
            cov = gvar.evalcov(b)
        except ValueError:
            raise ValueError('b does not have a covariance matrix')
        try:
            icov = numpy.linalg.inv(cov)
        except numpy.linalg.LinAlgError:
            raise ValueError("b's covariance matrix cannot be inverted")
        ata = a.T.dot(icov.dot(a))
        atb = a.T.dot(icov.dot(b))
    else:
        ata = a.T.dot(a)
        atb = a.T.dot(b)
    val, vec = gvar.linalg.eigh(ata)
    maxval = numpy.max(gvar.mean(val))  # N.B. val > 0 required
    ans = 0
    for i in range(len(val)):
        if gvar.mean(val[i]) < rcond * maxval:
            continue
        ans += vec[:, i] * vec[:, i].dot(atb) / val[i]
    if not extrainfo:
        return ans
    val = val[val >= rcond * maxval] ** 0.5
    d = a.dot(ans) - b
    residual = d.dot(icov.dot(d)) if weighted else d.dot(d)
    k = len(val)
    return ans, residual, k, val
[ "def", "lstsq", "(", "a", ",", "b", ",", "rcond", "=", "None", ",", "weighted", "=", "False", ",", "extrainfo", "=", "False", ")", ":", "a", "=", "numpy", ".", "asarray", "(", "a", ")", "b", "=", "numpy", ".", "asarray", "(", "b", ")", "if", "a", ".", "ndim", "!=", "2", ":", "raise", "ValueError", "(", "'a must have dimension 2: actual shape = '", "+", "str", "(", "a", ".", "shape", ")", ")", "if", "a", ".", "shape", "[", "0", "]", "!=", "b", ".", "shape", "[", "0", "]", ":", "raise", "ValueError", "(", "'a and b shapes mismatched: {} vs {}'", ".", "format", "(", "a", ".", "shape", ",", "b", ".", "shape", ")", ")", "if", "rcond", "is", "None", ":", "rcond", "=", "numpy", ".", "finfo", "(", "float", ")", ".", "eps", "*", "max", "(", "a", ".", "shape", ")", "if", "weighted", ":", "try", ":", "cov", "=", "gvar", ".", "evalcov", "(", "b", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "'b does not have a covariance matrix'", ")", "try", ":", "icov", "=", "numpy", ".", "linalg", ".", "inv", "(", "cov", ")", "except", "numpy", ".", "linalg", ".", "LinAlgError", ":", "raise", "ValueError", "(", "\"b's covariance matrix cannot be inverted\"", ")", "ata", "=", "a", ".", "T", ".", "dot", "(", "icov", ".", "dot", "(", "a", ")", ")", "atb", "=", "a", ".", "T", ".", "dot", "(", "icov", ".", "dot", "(", "b", ")", ")", "else", ":", "ata", "=", "a", ".", "T", ".", "dot", "(", "a", ")", "atb", "=", "a", ".", "T", ".", "dot", "(", "b", ")", "val", ",", "vec", "=", "gvar", ".", "linalg", ".", "eigh", "(", "ata", ")", "maxval", "=", "numpy", ".", "max", "(", "gvar", ".", "mean", "(", "val", ")", ")", "# N.B. val > 0 required", "ans", "=", "0", "for", "i", "in", "range", "(", "len", "(", "val", ")", ")", ":", "if", "gvar", ".", "mean", "(", "val", "[", "i", "]", ")", "<", "rcond", "*", "maxval", ":", "continue", "ans", "+=", "vec", "[", ":", ",", "i", "]", "*", "vec", "[", ":", ",", "i", "]", ".", "dot", "(", "atb", ")", "/", "val", "[", "i", "]", "if", "not", "extrainfo", ":", "return", "ans", "val", "=", "val", "[", "val", ">=", "rcond", "*", "maxval", "]", "**", "0.5", "d", "=", "a", ".", "dot", "(", "ans", ")", "-", "b", "residual", "=", "d", ".", "dot", "(", "icov", ".", "dot", "(", "d", ")", ")", "if", "weighted", "else", "d", ".", "dot", "(", "d", ")", "k", "=", "len", "(", "val", ")", "return", "ans", ",", "residual", ",", "k", ",", "val" ]
avg_line_len: 42.432836 | score: 21.447761
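A worked sketch for lstsq above, using the gvar library it depends on (the data values are illustrative, fitting a straight line through three noisy points):

    import numpy
    import gvar

    a = numpy.array([[1., 1.], [1., 2.], [1., 3.]])
    b = gvar.gvar(['2.1(1)', '3.0(1)', '4.1(1)'])  # measurements with 1-sigma errors
    x = lstsq(a, b, weighted=True)                 # uncertainties propagate into x
    print(x)                                       # x[0] intercept, x[1] slope, both GVars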
def create_networks(self, ids, id_vlan):
    """Set column 'active = 1' in tables redeipv4 and redeipv6.

    :param ids: ID for NetworkIPv4 and/or NetworkIPv6
    :return: Nothing
    """
    network_map = dict()
    network_map['ids'] = ids
    network_map['id_vlan'] = id_vlan

    code, xml = self.submit(
        {'network': network_map}, 'PUT', 'network/create/')

    return self.response(code, xml)
[ "def", "create_networks", "(", "self", ",", "ids", ",", "id_vlan", ")", ":", "network_map", "=", "dict", "(", ")", "network_map", "[", "'ids'", "]", "=", "ids", "network_map", "[", "'id_vlan'", "]", "=", "id_vlan", "code", ",", "xml", "=", "self", ".", "submit", "(", "{", "'network'", ":", "network_map", "}", ",", "'PUT'", ",", "'network/create/'", ")", "return", "self", ".", "response", "(", "code", ",", "xml", ")" ]
avg_line_len: 27.0625 | score: 17.8125
def delete_model(self, model_id, erase=False):
    """Delete the model with the given identifier in the database.

    Returns the handle for the deleted model or None if the object
    identifier is unknown.

    Parameters
    ----------
    model_id : string
        Unique model identifier
    erase : Boolean, optional
        If true, the record will be deleted from the database. Otherwise,
        the active flag will be set to False to support provenance
        tracking.

    Returns
    -------
    ModelHandle
    """
    return self.delete_object(model_id, erase=erase)
[ "def", "delete_model", "(", "self", ",", "model_id", ",", "erase", "=", "False", ")", ":", "return", "self", ".", "delete_object", "(", "model_id", ",", "erase", "=", "erase", ")" ]
avg_line_len: 35.411765 | score: 21.117647
def decompress(obj, return_type="bytes"):
    """De-compress it to its original form.

    :param obj: Compressed object, could be bytes or str.
    :param return_type: if bytes, then return bytes; if str, then use
        base64.b64decode; if obj, then use pickle.loads to return an object.
    """
    if isinstance(obj, binary_type):
        b = zlib.decompress(obj)
    elif isinstance(obj, string_types):
        b = zlib.decompress(base64.b64decode(obj.encode("utf-8")))
    else:
        raise TypeError("input cannot be anything other than str and bytes!")

    if return_type == "bytes":
        return b
    elif return_type == "str":
        return b.decode("utf-8")
    elif return_type == "obj":
        return pickle.loads(b)
    else:
        raise ValueError(
            "'return_type' has to be one of 'bytes', 'str' or 'obj'!")
[ "def", "decompress", "(", "obj", ",", "return_type", "=", "\"bytes\"", ")", ":", "if", "isinstance", "(", "obj", ",", "binary_type", ")", ":", "b", "=", "zlib", ".", "decompress", "(", "obj", ")", "elif", "isinstance", "(", "obj", ",", "string_types", ")", ":", "b", "=", "zlib", ".", "decompress", "(", "base64", ".", "b64decode", "(", "obj", ".", "encode", "(", "\"utf-8\"", ")", ")", ")", "else", ":", "raise", "TypeError", "(", "\"input cannot be anything other than str and bytes!\"", ")", "if", "return_type", "==", "\"bytes\"", ":", "return", "b", "elif", "return_type", "==", "\"str\"", ":", "return", "b", ".", "decode", "(", "\"utf-8\"", ")", "elif", "return_type", "==", "\"obj\"", ":", "return", "pickle", ".", "loads", "(", "b", ")", "else", ":", "raise", "ValueError", "(", "\"'return_type' has to be one of 'bytes', 'str' or 'obj'!\"", ")" ]
avg_line_len: 34.208333 | score: 17.458333
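A round-trip sketch showing decompress's three return types (the compress side is reconstructed from what the function expects: zlib for bytes, base64-wrapped zlib for str, pickled objects for obj):

    import base64
    import pickle
    import zlib

    payload = b"hello world"
    assert decompress(zlib.compress(payload)) == payload                  # bytes in, bytes out
    as_str = base64.b64encode(zlib.compress(payload)).decode("utf-8")
    assert decompress(as_str, return_type="str") == "hello world"         # str in, str out
    assert decompress(zlib.compress(pickle.dumps([1, 2])), return_type="obj") == [1, 2]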
def get_account(self, account_id, **kwargs):
    """Retrieves a CDN account with the specified account ID.

    :param account_id int: the numeric ID associated with the CDN account.
    :param dict \\*\\*kwargs: additional arguments to include in the object
                              mask.
    """
    if 'mask' not in kwargs:
        kwargs['mask'] = 'status'

    return self.account.getObject(id=account_id, **kwargs)
[ "def", "get_account", "(", "self", ",", "account_id", ",", "*", "*", "kwargs", ")", ":", "if", "'mask'", "not", "in", "kwargs", ":", "kwargs", "[", "'mask'", "]", "=", "'status'", "return", "self", ".", "account", ".", "getObject", "(", "id", "=", "account_id", ",", "*", "*", "kwargs", ")" ]
avg_line_len: 37.083333 | score: 19.75
def get_id(self):
    """Returns unique id of an alignment."""
    return hash(str(self.title) + str(self.best_score()) + str(self.hit_def))
[ "def", "get_id", "(", "self", ")", ":", "return", "hash", "(", "str", "(", "self", ".", "title", ")", "+", "str", "(", "self", ".", "best_score", "(", ")", ")", "+", "str", "(", "self", ".", "hit_def", ")", ")" ]
avg_line_len: 49.333333 | score: 21.333333
def iterativeFetch(query, batchSize=default_batch_size):
    """
    Returns rows of a sql fetch query on demand
    """
    while True:
        rows = query.fetchmany(batchSize)
        if not rows:
            break
        rowDicts = sqliteRowsToDicts(rows)
        for rowDict in rowDicts:
            yield rowDict
[ "def", "iterativeFetch", "(", "query", ",", "batchSize", "=", "default_batch_size", ")", ":", "while", "True", ":", "rows", "=", "query", ".", "fetchmany", "(", "batchSize", ")", "if", "not", "rows", ":", "break", "rowDicts", "=", "sqliteRowsToDicts", "(", "rows", ")", "for", "rowDict", "in", "rowDicts", ":", "yield", "rowDict" ]
avg_line_len: 28.090909 | score: 10.636364
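A usage sketch for iterativeFetch above with the standard sqlite3 module (it assumes the companion helper sqliteRowsToDicts maps sqlite3.Row objects to plain dicts):

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.row_factory = sqlite3.Row
    conn.execute('CREATE TABLE t (x INTEGER)')
    conn.executemany('INSERT INTO t VALUES (?)', [(i,) for i in range(10)])
    cursor = conn.execute('SELECT x FROM t')
    for rowDict in iterativeFetch(cursor, batchSize=4):  # rows fetched 4 at a time
        print(rowDict['x'])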
def t_newline(self, t):
    r'\n+'
    t.lexer.lineno += len(t.value)
    self.latest_newline = t.lexpos
[ "def", "t_newline", "(", "self", ",", "t", ")", ":", "t", ".", "lexer", ".", "lineno", "+=", "len", "(", "t", ".", "value", ")", "self", ".", "latest_newline", "=", "t", ".", "lexpos" ]
avg_line_len: 28.25 | score: 11.75
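For t_newline above: this is a PLY lexer rule, where the raw-string docstring is the token regex. Storing latest_newline lets the lexer derive column numbers later; a sketch of how such a companion helper might look (the helper name and the exact offset convention are assumptions):

    def find_column(self, t):
        # Column = token offset since the last recorded newline (1-based).
        return t.lexpos - self.latest_newline + 1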
def insert_list_of_dictionaries_into_database_tables(
        dbConn,
        log,
        dictList,
        dbTableName,
        uniqueKeyList=[],
        dateModified=False,
        dateCreated=True,
        batchSize=2500,
        replace=False,
        dbSettings=False):
    """insert list of dictionaries into database tables

    **Key Arguments:**
        - ``dbConn`` -- mysql database connection
        - ``log`` -- logger
        - ``dictList`` -- list of python dictionaries to add to the database table
        - ``dbTableName`` -- name of the database table
        - ``uniqueKeyList`` -- a list of column names to append as a unique constraint on the database
        - ``dateModified`` -- add the modification date as a column in the database
        - ``dateCreated`` -- add the created date as a column in the database
        - ``batchSize`` -- batch the insert commands into *batchSize* batches
        - ``replace`` -- replace row if a duplicate is found
        - ``dbSettings`` -- pass in the database settings so multiprocessing can establish one connection per process (might not be faster)

    **Return:**
        - None

    **Usage:**

        .. code-block:: python

            from fundamentals.mysql import insert_list_of_dictionaries_into_database_tables
            insert_list_of_dictionaries_into_database_tables(
                dbConn=dbConn,
                log=log,
                dictList=dictList,
                dbTableName="test_insert_many",
                uniqueKeyList=["col1", "col3"],
                dateModified=False,
                batchSize=2500
            )
    """
    log.debug(
        'starting the ``insert_list_of_dictionaries_into_database_tables`` function')

    global count
    global totalCount
    global globalDbConn
    global sharedList

    reDate = re.compile('^[0-9]{4}-[0-9]{2}-[0-9]{2}T')

    if dbSettings:
        globalDbConn = dbSettings
    else:
        globalDbConn = dbConn

    if len(dictList) == 0:
        log.warning(
            'the dictionary to be added to the database is empty' % locals())
        return None

    if len(dictList):
        convert_dictionary_to_mysql_table(
            dbConn=dbConn,
            log=log,
            dictionary=dictList[0],
            dbTableName=dbTableName,
            uniqueKeyList=uniqueKeyList,
            dateModified=dateModified,
            reDatetime=reDate,
            replace=replace,
            dateCreated=dateCreated)
        dictList = dictList[1:]

    dbConn.autocommit(False)

    if len(dictList):
        total = len(dictList)
        batches = int(total / batchSize)

        start = 0
        end = 0
        sharedList = []
        for i in range(batches + 1):
            end = end + batchSize
            start = i * batchSize
            thisBatch = dictList[start:end]
            sharedList.append((thisBatch, end))

        totalCount = total + 1
        ltotalCount = totalCount

        print "Starting to insert %(ltotalCount)s rows into %(dbTableName)s" % locals()
        print dbSettings

        if dbSettings == False:
            fmultiprocess(
                log=log,
                function=_insert_single_batch_into_database,
                inputArray=range(len(sharedList)),
                dbTableName=dbTableName,
                uniqueKeyList=uniqueKeyList,
                dateModified=dateModified,
                replace=replace,
                batchSize=batchSize,
                reDatetime=reDate,
                dateCreated=dateCreated
            )
        else:
            fmultiprocess(log=log, function=_add_dictlist_to_database_via_load_in_file,
                          inputArray=range(len(sharedList)), dbTablename=dbTableName,
                          dbSettings=dbSettings, dateModified=dateModified)

        sys.stdout.write("\x1b[1A\x1b[2K")
        print "%(ltotalCount)s / %(ltotalCount)s rows inserted into %(dbTableName)s" % locals()

    log.debug(
        'completed the ``insert_list_of_dictionaries_into_database_tables`` function')
    return None
[ "def", "insert_list_of_dictionaries_into_database_tables", "(", "dbConn", ",", "log", ",", "dictList", ",", "dbTableName", ",", "uniqueKeyList", "=", "[", "]", ",", "dateModified", "=", "False", ",", "dateCreated", "=", "True", ",", "batchSize", "=", "2500", ",", "replace", "=", "False", ",", "dbSettings", "=", "False", ")", ":", "log", ".", "debug", "(", "'completed the ````insert_list_of_dictionaries_into_database_tables`` function'", ")", "global", "count", "global", "totalCount", "global", "globalDbConn", "global", "sharedList", "reDate", "=", "re", ".", "compile", "(", "'^[0-9]{4}-[0-9]{2}-[0-9]{2}T'", ")", "if", "dbSettings", ":", "globalDbConn", "=", "dbSettings", "else", ":", "globalDbConn", "=", "dbConn", "if", "len", "(", "dictList", ")", "==", "0", ":", "log", ".", "warning", "(", "'the dictionary to be added to the database is empty'", "%", "locals", "(", ")", ")", "return", "None", "if", "len", "(", "dictList", ")", ":", "convert_dictionary_to_mysql_table", "(", "dbConn", "=", "dbConn", ",", "log", "=", "log", ",", "dictionary", "=", "dictList", "[", "0", "]", ",", "dbTableName", "=", "dbTableName", ",", "uniqueKeyList", "=", "uniqueKeyList", ",", "dateModified", "=", "dateModified", ",", "reDatetime", "=", "reDate", ",", "replace", "=", "replace", ",", "dateCreated", "=", "dateCreated", ")", "dictList", "=", "dictList", "[", "1", ":", "]", "dbConn", ".", "autocommit", "(", "False", ")", "if", "len", "(", "dictList", ")", ":", "total", "=", "len", "(", "dictList", ")", "batches", "=", "int", "(", "total", "/", "batchSize", ")", "start", "=", "0", "end", "=", "0", "sharedList", "=", "[", "]", "for", "i", "in", "range", "(", "batches", "+", "1", ")", ":", "end", "=", "end", "+", "batchSize", "start", "=", "i", "*", "batchSize", "thisBatch", "=", "dictList", "[", "start", ":", "end", "]", "sharedList", ".", "append", "(", "(", "thisBatch", ",", "end", ")", ")", "totalCount", "=", "total", "+", "1", "ltotalCount", "=", "totalCount", "print", "\"Starting to insert %(ltotalCount)s rows into %(dbTableName)s\"", "%", "locals", "(", ")", "print", "dbSettings", "if", "dbSettings", "==", "False", ":", "fmultiprocess", "(", "log", "=", "log", ",", "function", "=", "_insert_single_batch_into_database", ",", "inputArray", "=", "range", "(", "len", "(", "sharedList", ")", ")", ",", "dbTableName", "=", "dbTableName", ",", "uniqueKeyList", "=", "uniqueKeyList", ",", "dateModified", "=", "dateModified", ",", "replace", "=", "replace", ",", "batchSize", "=", "batchSize", ",", "reDatetime", "=", "reDate", ",", "dateCreated", "=", "dateCreated", ")", "else", ":", "fmultiprocess", "(", "log", "=", "log", ",", "function", "=", "_add_dictlist_to_database_via_load_in_file", ",", "inputArray", "=", "range", "(", "len", "(", "sharedList", ")", ")", ",", "dbTablename", "=", "dbTableName", ",", "dbSettings", "=", "dbSettings", ",", "dateModified", "=", "dateModified", ")", "sys", ".", "stdout", ".", "write", "(", "\"\\x1b[1A\\x1b[2K\"", ")", "print", "\"%(ltotalCount)s / %(ltotalCount)s rows inserted into %(dbTableName)s\"", "%", "locals", "(", ")", "log", ".", "debug", "(", "'completed the ``insert_list_of_dictionaries_into_database_tables`` function'", ")", "return", "None" ]
avg_line_len: 31.412698 | score: 22.539683
def remove_certain_leaves(tr, to_remove=lambda node: False):
    """
    Removes all the branches leading to leaves identified positively by the to_remove function.

    :param tr: the tree of interest (ete3 Tree)
    :param to_remove: a method to check if a leaf should be removed.
    :return: void, modifies the initial tree.
    """
    tips = [tip for tip in tr if to_remove(tip)]
    for node in tips:
        if node.is_root():
            return None
        parent = node.up
        parent.remove_child(node)
        # If the parent node has only one child now, merge them.
        if len(parent.children) == 1:
            brother = parent.children[0]
            brother.dist += parent.dist
            if parent.is_root():
                brother.up = None
                tr = brother
            else:
                grandparent = parent.up
                grandparent.remove_child(parent)
                grandparent.add_child(brother)
    return tr
[ "def", "remove_certain_leaves", "(", "tr", ",", "to_remove", "=", "lambda", "node", ":", "False", ")", ":", "tips", "=", "[", "tip", "for", "tip", "in", "tr", "if", "to_remove", "(", "tip", ")", "]", "for", "node", "in", "tips", ":", "if", "node", ".", "is_root", "(", ")", ":", "return", "None", "parent", "=", "node", ".", "up", "parent", ".", "remove_child", "(", "node", ")", "# If the parent node has only one child now, merge them.", "if", "len", "(", "parent", ".", "children", ")", "==", "1", ":", "brother", "=", "parent", ".", "children", "[", "0", "]", "brother", ".", "dist", "+=", "parent", ".", "dist", "if", "parent", ".", "is_root", "(", ")", ":", "brother", ".", "up", "=", "None", "tr", "=", "brother", "else", ":", "grandparent", "=", "parent", ".", "up", "grandparent", ".", "remove_child", "(", "parent", ")", "grandparent", ".", "add_child", "(", "brother", ")", "return", "tr" ]
avg_line_len: 36 | score: 13.538462
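A usage sketch for remove_certain_leaves above with ete3 (the newick string is illustrative); keep the return value, since removing leaves can replace the root:

    from ete3 import Tree

    tr = Tree('((A:1,B:1):1,C:2);')
    tr = remove_certain_leaves(tr, to_remove=lambda leaf: leaf.name == 'A')
    print(tr.write())  # B absorbs its former parent's branch length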
def _enable_rpcs(self, conn, services, timeout=1.0):
    """Prepare this device to receive RPCs
    """
    #FIXME: Check for characteristic existence in a try/catch and return failure if not found
    success, result = self._set_notification(conn, services[TileBusService]['characteristics'][TileBusReceiveHeaderCharacteristic], True, timeout)
    if not success:
        return success, result

    return self._set_notification(conn, services[TileBusService]['characteristics'][TileBusReceivePayloadCharacteristic], True, timeout)
[ "def", "_enable_rpcs", "(", "self", ",", "conn", ",", "services", ",", "timeout", "=", "1.0", ")", ":", "#FIXME: Check for characteristic existence in a try/catch and return failure if not found", "success", ",", "result", "=", "self", ".", "_set_notification", "(", "conn", ",", "services", "[", "TileBusService", "]", "[", "'characteristics'", "]", "[", "TileBusReceiveHeaderCharacteristic", "]", ",", "True", ",", "timeout", ")", "if", "not", "success", ":", "return", "success", ",", "result", "return", "self", ".", "_set_notification", "(", "conn", ",", "services", "[", "TileBusService", "]", "[", "'characteristics'", "]", "[", "TileBusReceivePayloadCharacteristic", "]", ",", "True", ",", "timeout", ")" ]
avg_line_len: 50.272727 | score: 38.363636
def appendQKeyEvent(self, keyEvent: QtGui.QKeyEvent):
    """
    Append another key to the key sequence represented by this object.

    |Args|

    * ``keyEvent`` (**QKeyEvent**): the key to add.

    |Returns|

    **None**

    |Raises|

    * **QtmacsArgumentError** if at least one argument has an invalid type.
    """
    # Store the QKeyEvent.
    self.keylistKeyEvent.append(keyEvent)

    # Convenience shortcuts.
    mod = keyEvent.modifiers()
    key = keyEvent.key()

    # Add the modifier and key to the list. The modifier is a
    # QFlag structure and must be typecast to an integer to avoid
    # difficulties with the hashing in the ``match`` routine of
    # the ``QtmacsKeymap`` object.
    self.keylistQtConstants.append((int(mod), key))
[ "def", "appendQKeyEvent", "(", "self", ",", "keyEvent", ":", "QtGui", ".", "QKeyEvent", ")", ":", "# Store the QKeyEvent.", "self", ".", "keylistKeyEvent", ".", "append", "(", "keyEvent", ")", "# Convenience shortcuts.", "mod", "=", "keyEvent", ".", "modifiers", "(", ")", "key", "=", "keyEvent", ".", "key", "(", ")", "# Add the modifier and key to the list. The modifier is a", "# QFlag structure and must by typecast to an integer to avoid", "# difficulties with the hashing in the ``match`` routine of", "# the ``QtmacsKeymap`` object.", "self", ".", "keylistQtConstants", ".", "append", "(", "(", "int", "(", "mod", ")", ",", "key", ")", ")" ]
avg_line_len: 28.892857 | score: 23.464286
def get_by_id(self, id_code: str) -> Currency or None:
    """
    Get currency by ID

    :param id_code: currency ID, like "R01305"
    :return: currency or None.
    """
    try:
        return [_ for _ in self.currencies if _.id == id_code][0]
    except IndexError:
        return None
[ "def", "get_by_id", "(", "self", ",", "id_code", ":", "str", ")", "->", "Currency", "or", "None", ":", "try", ":", "return", "[", "_", "for", "_", "in", "self", ".", "currencies", "if", "_", ".", "id", "==", "id_code", "]", "[", "0", "]", "except", "IndexError", ":", "return", "None" ]
avg_line_len: 30.1 | score: 15
def get_chromosome_priority(chrom, chrom_dict={}):
    """
    Return the chromosome priority

    Arguments:
        chrom (str): The chromosome name from the vcf
        chrom_dict (dict): A map of chromosome names and their priority

    Return:
        priority (str): The priority for this chromosome
    """
    priority = 0
    chrom = str(chrom).lstrip('chr')

    if chrom_dict:
        priority = chrom_dict.get(chrom, 0)
    else:
        try:
            if int(chrom) < 23:
                priority = int(chrom)
        except ValueError:
            if chrom == 'X':
                priority = 23
            elif chrom == 'Y':
                priority = 24
            elif chrom == 'MT':
                priority = 25
            else:
                priority = 26

    return str(priority)
[ "def", "get_chromosome_priority", "(", "chrom", ",", "chrom_dict", "=", "{", "}", ")", ":", "priority", "=", "0", "chrom", "=", "str", "(", "chrom", ")", ".", "lstrip", "(", "'chr'", ")", "if", "chrom_dict", ":", "priority", "=", "chrom_dict", ".", "get", "(", "chrom", ",", "0", ")", "else", ":", "try", ":", "if", "int", "(", "chrom", ")", "<", "23", ":", "priority", "=", "int", "(", "chrom", ")", "except", "ValueError", ":", "if", "chrom", "==", "'X'", ":", "priority", "=", "23", "elif", "chrom", "==", "'Y'", ":", "priority", "=", "24", "elif", "chrom", "==", "'MT'", ":", "priority", "=", "25", "else", ":", "priority", "=", "26", "return", "str", "(", "priority", ")" ]
avg_line_len: 24.212121 | score: 18.090909
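Since get_chromosome_priority returns the priority as a string, cast it when sorting numerically; a quick sketch:

    chroms = ['X', '10', 'MT', '2', 'Y']
    ordered = sorted(chroms, key=lambda c: int(get_chromosome_priority(c)))
    print(ordered)  # ['2', '10', 'X', 'Y', 'MT']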
def _extract_ld_data(data, data_format=None, **kwargs):
    """Extract the given :attr:`data` into a
    :class:`~.ExtractedLinkedDataResult` with the resulting data stripped
    of any Linked Data specifics. Any missing Linked Data properties are
    returned as ``None`` in the resulting :class:`~.ExtractLinkedDataResult`.

    Does not modify the given :attr:`data`.
    """
    if not data_format:
        data_format = _get_format_from_data(data)

    extract_ld_data_fn = _data_format_resolver(data_format, {
        'jsonld': _extract_ld_data_from_jsonld,
        'json': _extract_ld_data_from_json,
        'ipld': _extract_ld_data_from_ipld,
    })
    return extract_ld_data_fn(data, **kwargs)
[ "def", "_extract_ld_data", "(", "data", ",", "data_format", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "not", "data_format", ":", "data_format", "=", "_get_format_from_data", "(", "data", ")", "extract_ld_data_fn", "=", "_data_format_resolver", "(", "data_format", ",", "{", "'jsonld'", ":", "_extract_ld_data_from_jsonld", ",", "'json'", ":", "_extract_ld_data_from_json", ",", "'ipld'", ":", "_extract_ld_data_from_ipld", ",", "}", ")", "return", "extract_ld_data_fn", "(", "data", ",", "*", "*", "kwargs", ")" ]
avg_line_len: 38.388889 | score: 14.666667
def add(self, key, value, description='', history_max_size=None):
    """Add a new item (key, value) to the current history."""
    if key not in self.stats_history:
        self.stats_history[key] = GlancesAttribute(key,
                                                   description=description,
                                                   history_max_size=history_max_size)
    self.stats_history[key].value = value
[ "def", "add", "(", "self", ",", "key", ",", "value", ",", "description", "=", "''", ",", "history_max_size", "=", "None", ")", ":", "if", "key", "not", "in", "self", ".", "stats_history", ":", "self", ".", "stats_history", "[", "key", "]", "=", "GlancesAttribute", "(", "key", ",", "description", "=", "description", ",", "history_max_size", "=", "history_max_size", ")", "self", ".", "stats_history", "[", "key", "]", ".", "value", "=", "value" ]
avg_line_len: 51.777778 | score: 16.222222
def update_machine_state(self, machine_state):
    """Updates the machine state in the VM process.
    Must be called only in certain cases (see the method implementation).

    in machine_state of type :class:`MachineState`

    raises :class:`VBoxErrorInvalidVmState`
        Session state prevents operation.

    raises :class:`VBoxErrorInvalidObjectState`
        Session type prevents operation.
    """
    if not isinstance(machine_state, MachineState):
        raise TypeError("machine_state can only be an instance of type MachineState")
    self._call("updateMachineState", in_p=[machine_state])
[ "def", "update_machine_state", "(", "self", ",", "machine_state", ")", ":", "if", "not", "isinstance", "(", "machine_state", ",", "MachineState", ")", ":", "raise", "TypeError", "(", "\"machine_state can only be an instance of type MachineState\"", ")", "self", ".", "_call", "(", "\"updateMachineState\"", ",", "in_p", "=", "[", "machine_state", "]", ")" ]
avg_line_len: 37.722222 | score: 14.5
def supports_gzip(self, context):
    """
    Looks at the RequestContext object and determines if the client
    supports gzip encoded content. If the client does, we will send
    them to the gzipped version of files that are allowed to be
    compressed. Clients without gzip support will be served the
    original media.
    """
    if 'request' in context and client.supports_gzip():
        enc = context['request'].META.get('HTTP_ACCEPT_ENCODING', '')
        return 'gzip' in enc and msettings['SERVE_REMOTE']
    return False
[ "def", "supports_gzip", "(", "self", ",", "context", ")", ":", "if", "'request'", "in", "context", "and", "client", ".", "supports_gzip", "(", ")", ":", "enc", "=", "context", "[", "'request'", "]", ".", "META", ".", "get", "(", "'HTTP_ACCEPT_ENCODING'", ",", "''", ")", "return", "'gzip'", "in", "enc", "and", "msettings", "[", "'SERVE_REMOTE'", "]", "return", "False" ]
avg_line_len: 51 | score: 21.181818
def makeindex_runs(self, gloss_files):
    '''
    Check for each glossary if it has to be regenerated with "makeindex".

    @return: True if "makeindex" was called.
    '''
    gloss_changed = False
    for gloss in self.glossaries:
        make_gloss = False
        ext_i, ext_o = self.glossaries[gloss]
        fname_in = '%s.%s' % (self.project_name, ext_i)
        fname_out = '%s.%s' % (self.project_name, ext_o)
        if re.search('No file %s.' % fname_in, self.out):
            make_gloss = True
        if not os.path.isfile(fname_out):
            make_gloss = True
        else:
            with open(fname_out) as fobj:
                try:
                    if gloss_files[gloss] != fobj.read():
                        make_gloss = True
                except KeyError:
                    make_gloss = True
        if make_gloss:
            self.log.info('Running makeindex (%s)...' % gloss)
            try:
                cmd = ['makeindex', '-q', '-s', '%s.ist' % self.project_name,
                       '-o', fname_in, fname_out]
                with open(os.devnull, 'w') as null:
                    Popen(cmd, stdout=null).wait()
            except OSError:
                self.log.error(NO_LATEX_ERROR % 'makeindex')
                sys.exit(1)
            gloss_changed = True
    return gloss_changed
[ "def", "makeindex_runs", "(", "self", ",", "gloss_files", ")", ":", "gloss_changed", "=", "False", "for", "gloss", "in", "self", ".", "glossaries", ":", "make_gloss", "=", "False", "ext_i", ",", "ext_o", "=", "self", ".", "glossaries", "[", "gloss", "]", "fname_in", "=", "'%s.%s'", "%", "(", "self", ".", "project_name", ",", "ext_i", ")", "fname_out", "=", "'%s.%s'", "%", "(", "self", ".", "project_name", ",", "ext_o", ")", "if", "re", ".", "search", "(", "'No file %s.'", "%", "fname_in", ",", "self", ".", "out", ")", ":", "make_gloss", "=", "True", "if", "not", "os", ".", "path", ".", "isfile", "(", "fname_out", ")", ":", "make_gloss", "=", "True", "else", ":", "with", "open", "(", "fname_out", ")", "as", "fobj", ":", "try", ":", "if", "gloss_files", "[", "gloss", "]", "!=", "fobj", ".", "read", "(", ")", ":", "make_gloss", "=", "True", "except", "KeyError", ":", "make_gloss", "=", "True", "if", "make_gloss", ":", "self", ".", "log", ".", "info", "(", "'Running makeindex (%s)...'", "%", "gloss", ")", "try", ":", "cmd", "=", "[", "'makeindex'", ",", "'-q'", ",", "'-s'", ",", "'%s.ist'", "%", "self", ".", "project_name", ",", "'-o'", ",", "fname_in", ",", "fname_out", "]", "with", "open", "(", "os", ".", "devnull", ",", "'w'", ")", "as", "null", ":", "Popen", "(", "cmd", ",", "stdout", "=", "null", ")", ".", "wait", "(", ")", "except", "OSError", ":", "self", ".", "log", ".", "error", "(", "NO_LATEX_ERROR", "%", "'makeindex'", ")", "sys", ".", "exit", "(", "1", ")", "gloss_changed", "=", "True", "return", "gloss_changed" ]
37.641026
15.282051
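The makeindex invocation above runs an external tool quietly by redirecting stdout to os.devnull and waiting on the Popen handle for the exit status. A minimal standalone sketch of that pattern, assuming a POSIX system (`true` is a placeholder command, not the record's makeindex call):

import os
from subprocess import Popen

with open(os.devnull, 'w') as null:
    returncode = Popen(['true'], stdout=null).wait()  # output discarded
print(returncode)  # 0 on success; nonzero still signals failure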
def main(argv=None): """ben-umb entry point""" arguments = cli_common(__doc__, argv=argv) driver = CampaignDriver(arguments['CAMPAIGN-DIR'], expandcampvars=False) driver(no_exec=True) if argv is not None: return driver
[ "def", "main", "(", "argv", "=", "None", ")", ":", "arguments", "=", "cli_common", "(", "__doc__", ",", "argv", "=", "argv", ")", "driver", "=", "CampaignDriver", "(", "arguments", "[", "'CAMPAIGN-DIR'", "]", ",", "expandcampvars", "=", "False", ")", "driver", "(", "no_exec", "=", "True", ")", "if", "argv", "is", "not", "None", ":", "return", "driver" ]
34.285714
16.142857
def apply_attributes(self, nc, table, prefix=''): """ apply fixed attributes, or look up attributes needed and apply them """ for name, value in sorted(table.items()): if name in nc.ncattrs(): LOG.debug('already have a value for %s' % name) continue if value is not None: setattr(nc, name, value) else: funcname = prefix+name # _global_ + product_tile_height func = getattr(self, funcname, None) if func is not None: value = func() if value is not None: setattr(nc, name, value) else: LOG.info('no routine matching %s' % funcname)
[ "def", "apply_attributes", "(", "self", ",", "nc", ",", "table", ",", "prefix", "=", "''", ")", ":", "for", "name", ",", "value", "in", "sorted", "(", "table", ".", "items", "(", ")", ")", ":", "if", "name", "in", "nc", ".", "ncattrs", "(", ")", ":", "LOG", ".", "debug", "(", "'already have a value for %s'", "%", "name", ")", "continue", "if", "value", "is", "not", "None", ":", "setattr", "(", "nc", ",", "name", ",", "value", ")", "else", ":", "funcname", "=", "prefix", "+", "name", "# _global_ + product_tile_height", "func", "=", "getattr", "(", "self", ",", "funcname", ",", "None", ")", "if", "func", "is", "not", "None", ":", "value", "=", "func", "(", ")", "if", "value", "is", "not", "None", ":", "setattr", "(", "nc", ",", "name", ",", "value", ")", "else", ":", "LOG", ".", "info", "(", "'no routine matching %s'", "%", "funcname", ")" ]
40.894737
12.263158
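`apply_attributes` resolves missing attribute values by name: it concatenates `prefix + name` and looks the result up as a method with `getattr`, logging when no routine matches. A minimal sketch of that dispatch pattern with illustrative names (nothing below comes from the record beyond the getattr idiom and the "_global_" prefix convention its comment mentions):

class Writer:
    def _global_tile_count(self):
        # Illustrative routine resolved by name composition.
        return 64

    def lookup(self, name, prefix='_global_'):
        # Compose the routine name and resolve it, tolerating absence.
        func = getattr(self, prefix + name, None)
        return func() if func is not None else None

w = Writer()
print(w.lookup('tile_count'))   # 64
print(w.lookup('missing_key'))  # None (no routine matching _global_missing_key)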
def crossmap(fas, reads, options, no_shrink, keepDB, threads, cluster, nodes): """ map all read sets against all fasta files """ if cluster is True: threads = '48' btc = [] for fa in fas: btd = bowtiedb(fa, keepDB) F, R, U = reads if F is not False: if U is False: u = False for i, f in enumerate(F): r = R[i] if U is not False: u = U[i] sam = '%s/%s-vs-%s' % (os.getcwd(), \ fa.rsplit('/', 1)[-1], f.rsplit('/', 1)[-1].rsplit('.', 3)[0]) btc.append(bowtie(sam, btd, f, r, u, options, no_shrink, threads)) else: f = False r = False for u in U: sam = '%s/%s-vs-%s' % (os.getcwd(), \ fa.rsplit('/', 1)[-1], u.rsplit('/', 1)[-1].rsplit('.', 3)[0]) btc.append(bowtie(sam, btd, f, r, u, options, no_shrink, threads)) if cluster is False: for i in btc: p = subprocess.Popen(i, shell = True) p.communicate() else: ID = ''.join(random.choice([str(i) for i in range(0, 9)]) for _ in range(5)) for node, commands in enumerate(chunks(btc, nodes), 1): bs = open('%s/crossmap-qsub.%s.%s.sh' % (os.getcwd(), ID, node), 'w') print('\n'.join(commands), file=bs) bs.close() p = subprocess.Popen(\ 'qsub -V -N crossmap %s' \ % (bs.name), \ shell = True) p.communicate()
[ "def", "crossmap", "(", "fas", ",", "reads", ",", "options", ",", "no_shrink", ",", "keepDB", ",", "threads", ",", "cluster", ",", "nodes", ")", ":", "if", "cluster", "is", "True", ":", "threads", "=", "'48'", "btc", "=", "[", "]", "for", "fa", "in", "fas", ":", "btd", "=", "bowtiedb", "(", "fa", ",", "keepDB", ")", "F", ",", "R", ",", "U", "=", "reads", "if", "F", "is", "not", "False", ":", "if", "U", "is", "False", ":", "u", "=", "False", "for", "i", ",", "f", "in", "enumerate", "(", "F", ")", ":", "r", "=", "R", "[", "i", "]", "if", "U", "is", "not", "False", ":", "u", "=", "U", "[", "i", "]", "sam", "=", "'%s/%s-vs-%s'", "%", "(", "os", ".", "getcwd", "(", ")", ",", "fa", ".", "rsplit", "(", "'/'", ",", "1", ")", "[", "-", "1", "]", ",", "f", ".", "rsplit", "(", "'/'", ",", "1", ")", "[", "-", "1", "]", ".", "rsplit", "(", "'.'", ",", "3", ")", "[", "0", "]", ")", "btc", ".", "append", "(", "bowtie", "(", "sam", ",", "btd", ",", "f", ",", "r", ",", "u", ",", "options", ",", "no_shrink", ",", "threads", ")", ")", "else", ":", "f", "=", "False", "r", "=", "False", "for", "u", "in", "U", ":", "sam", "=", "'%s/%s-vs-%s'", "%", "(", "os", ".", "getcwd", "(", ")", ",", "fa", ".", "rsplit", "(", "'/'", ",", "1", ")", "[", "-", "1", "]", ",", "u", ".", "rsplit", "(", "'/'", ",", "1", ")", "[", "-", "1", "]", ".", "rsplit", "(", "'.'", ",", "3", ")", "[", "0", "]", ")", "btc", ".", "append", "(", "bowtie", "(", "sam", ",", "btd", ",", "f", ",", "r", ",", "u", ",", "options", ",", "no_shrink", ",", "threads", ")", ")", "if", "cluster", "is", "False", ":", "for", "i", "in", "btc", ":", "p", "=", "subprocess", ".", "Popen", "(", "i", ",", "shell", "=", "True", ")", "p", ".", "communicate", "(", ")", "else", ":", "ID", "=", "''", ".", "join", "(", "random", ".", "choice", "(", "[", "str", "(", "i", ")", "for", "i", "in", "range", "(", "0", ",", "9", ")", "]", ")", "for", "_", "in", "range", "(", "5", ")", ")", "for", "node", ",", "commands", "in", "enumerate", "(", "chunks", "(", "btc", ",", "nodes", ")", ",", "1", ")", ":", "bs", "=", "open", "(", "'%s/crossmap-qsub.%s.%s.sh'", "%", "(", "os", ".", "getcwd", "(", ")", ",", "ID", ",", "node", ")", ",", "'w'", ")", "print", "(", "'\\n'", ".", "join", "(", "commands", ")", ",", "file", "=", "bs", ")", "bs", ".", "close", "(", ")", "p", "=", "subprocess", ".", "Popen", "(", "'qsub -V -N crossmap %s'", "%", "(", "bs", ".", "name", ")", ",", "shell", "=", "True", ")", "p", ".", "communicate", "(", ")" ]
37.952381
18.333333
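`crossmap` relies on a `chunks(btc, nodes)` helper that is not shown in the record. A plausible implementation, assuming it splits the command list into one slice per node:

def chunks(lst, n):
    # Split lst into n roughly equal slices, one per node (assumed semantics).
    size = max(1, (len(lst) + n - 1) // n)
    return [lst[i:i + size] for i in range(0, len(lst), size)]

print(chunks(['a', 'b', 'c', 'd', 'e'], 2))  # [['a', 'b', 'c'], ['d', 'e']]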
def _kill_all_kids(self, sig):
        """
        Kill all subprocesses (and their subprocesses) that the executor started.

        This function tries to kill all leftovers in the process tree
        that the current executor may have left. It uses an environment
        variable to recognise whether a process originated in this Executor,
        so it is not 100 % reliable and some daemons fired by a subprocess
        may still be running.

        :param int sig: signal used to stop processes run by the executor.
        :return: process ids (pids) of killed processes
        :rtype: list
        """
        pids = processes_with_env(ENV_UUID, self._uuid)
        for pid in pids:
            log.debug("Killing process %d ...", pid)
            try:
                os.kill(pid, sig)
            except OSError as err:
                if err.errno in IGNORED_ERROR_CODES:
                    # the process has died before we tried to kill it.
                    pass
                else:
                    raise
            log.debug("Killed process %d.", pid)
        return pids
[ "def", "_kill_all_kids", "(", "self", ",", "sig", ")", ":", "pids", "=", "processes_with_env", "(", "ENV_UUID", ",", "self", ".", "_uuid", ")", "for", "pid", "in", "pids", ":", "log", ".", "debug", "(", "\"Killing process %d ...\"", ",", "pid", ")", "try", ":", "os", ".", "kill", "(", "pid", ",", "sig", ")", "except", "OSError", "as", "err", ":", "if", "err", ".", "errno", "in", "IGNORED_ERROR_CODES", ":", "# the process has died before we tried to kill it.", "pass", "else", ":", "raise", "log", ".", "debug", "(", "\"Killed process %d.\"", ",", "pid", ")", "return", "pids" ]
39.692308
20
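The kill loop tolerates the race where a child exits between discovery and the kill call by whitelisting specific errno values. A standalone sketch of that defensive kill on POSIX; `IGNORED_ERROR_CODES` is not defined in the record, and `errno.ESRCH` ("no such process") is an assumption about its contents:

import errno
import os
import signal

IGNORED_ERROR_CODES = {errno.ESRCH}  # assumed contents: process already gone

def kill_quietly(pid, sig=signal.SIGTERM):
    try:
        os.kill(pid, sig)
    except OSError as err:
        if err.errno not in IGNORED_ERROR_CODES:
            raise  # unexpected failure: propagate

kill_quietly(os.getpid(), 0)  # signal 0 only probes existence; a no-op here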
def set_user_timer(self, value, index): """Set the current value of a user timer.""" err = self.clock_manager.set_tick(index, value) return [err]
[ "def", "set_user_timer", "(", "self", ",", "value", ",", "index", ")", ":", "err", "=", "self", ".", "clock_manager", ".", "set_tick", "(", "index", ",", "value", ")", "return", "[", "err", "]" ]
33.2
15.2
def register_metas(self): """register metas""" # concatenate some attributes to global lists: aggregated = {'build': [], 'watch': []} for attribute, values in aggregated.items(): for info in self.info['analyses'] + [self.info]: if attribute in info: values.append(info[attribute]) for meta in self.metas: log.debug('Registering meta information {}'.format(meta.name)) # grab routes self.routes += [(r'/{}/{}'.format(meta.name, route), handler, data) for route, handler, data in meta.routes] # process files to watch for autoreload if aggregated['watch']: to_watch = [expr for w in aggregated['watch'] for expr in w] log.info('watching additional files: {}'.format(to_watch)) cwd = os.getcwd() os.chdir(self.analyses_path) if glob2: files = [os.path.join(self.analyses_path, fn) for expr in to_watch for fn in glob2.glob(expr)] else: files = [os.path.join(self.analyses_path, fn) for expr in to_watch for fn in glob.glob(expr)] if any('**' in expr for expr in to_watch): log.warning('Please run "pip install glob2" to properly ' 'process watch patterns with "**".') os.chdir(cwd) for fn in files: log.debug('watch file {}'.format(fn)) tornado.autoreload.watch(fn) # save build commands self.build_cmds = aggregated['build']
[ "def", "register_metas", "(", "self", ")", ":", "# concatenate some attributes to global lists:", "aggregated", "=", "{", "'build'", ":", "[", "]", ",", "'watch'", ":", "[", "]", "}", "for", "attribute", ",", "values", "in", "aggregated", ".", "items", "(", ")", ":", "for", "info", "in", "self", ".", "info", "[", "'analyses'", "]", "+", "[", "self", ".", "info", "]", ":", "if", "attribute", "in", "info", ":", "values", ".", "append", "(", "info", "[", "attribute", "]", ")", "for", "meta", "in", "self", ".", "metas", ":", "log", ".", "debug", "(", "'Registering meta information {}'", ".", "format", "(", "meta", ".", "name", ")", ")", "# grab routes", "self", ".", "routes", "+=", "[", "(", "r'/{}/{}'", ".", "format", "(", "meta", ".", "name", ",", "route", ")", ",", "handler", ",", "data", ")", "for", "route", ",", "handler", ",", "data", "in", "meta", ".", "routes", "]", "# process files to watch for autoreload", "if", "aggregated", "[", "'watch'", "]", ":", "to_watch", "=", "[", "expr", "for", "w", "in", "aggregated", "[", "'watch'", "]", "for", "expr", "in", "w", "]", "log", ".", "info", "(", "'watching additional files: {}'", ".", "format", "(", "to_watch", ")", ")", "cwd", "=", "os", ".", "getcwd", "(", ")", "os", ".", "chdir", "(", "self", ".", "analyses_path", ")", "if", "glob2", ":", "files", "=", "[", "os", ".", "path", ".", "join", "(", "self", ".", "analyses_path", ",", "fn", ")", "for", "expr", "in", "to_watch", "for", "fn", "in", "glob2", ".", "glob", "(", "expr", ")", "]", "else", ":", "files", "=", "[", "os", ".", "path", ".", "join", "(", "self", ".", "analyses_path", ",", "fn", ")", "for", "expr", "in", "to_watch", "for", "fn", "in", "glob", ".", "glob", "(", "expr", ")", "]", "if", "any", "(", "'**'", "in", "expr", "for", "expr", "in", "to_watch", ")", ":", "log", ".", "warning", "(", "'Please run \"pip install glob2\" to properly '", "'process watch patterns with \"**\".'", ")", "os", ".", "chdir", "(", "cwd", ")", "for", "fn", "in", "files", ":", "log", ".", "debug", "(", "'watch file {}'", ".", "format", "(", "fn", ")", ")", "tornado", ".", "autoreload", ".", "watch", "(", "fn", ")", "# save build commands", "self", ".", "build_cmds", "=", "aggregated", "[", "'build'", "]" ]
39.97619
20.357143
def copy(self): """ Creates a copy of the TT-matrix """ c = matrix() c.tt = self.tt.copy() c.n = self.n.copy() c.m = self.m.copy() return c
[ "def", "copy", "(", "self", ")", ":", "c", "=", "matrix", "(", ")", "c", ".", "tt", "=", "self", ".", "tt", ".", "copy", "(", ")", "c", ".", "n", "=", "self", ".", "n", ".", "copy", "(", ")", "c", ".", "m", "=", "self", ".", "m", ".", "copy", "(", ")", "return", "c" ]
25.857143
15.142857
def add_result(self, content_id, content, **kwargs):
        """Add a result to the list of results."""
        result_object = self.ResultClass(content_id, self.name, self.version, formats.JSON, content, **kwargs)
        self.results.append(result_object)
        return
[ "def", "add_result", "(", "self", ",", "content_id", ",", "content", ",", "*", "*", "kwargs", ")", ":", "result_object", "=", "self", ".", "ResultClass", "(", "content_id", ",", "self", ".", "name", ",", "self", ".", "version", ",", "formats", ".", "JSON", ",", "content", ",", "*", "*", "kwargs", ")", "self", ".", "results", ".", "append", "(", "result_object", ")", "return" ]
51.5
18.333333
def match_main(self, text, pattern, loc): """Locate the best instance of 'pattern' in 'text' near 'loc'. Args: text: The text to search. pattern: The pattern to search for. loc: The location to search around. Returns: Best match index or -1. """ # Check for null inputs. if text == None or pattern == None: raise ValueError("Null inputs. (match_main)") loc = max(0, min(loc, len(text))) if text == pattern: # Shortcut (potentially not guaranteed by the algorithm) return 0 elif not text: # Nothing to match. return -1 elif text[loc:loc + len(pattern)] == pattern: # Perfect match at the perfect spot! (Includes case of null pattern) return loc else: # Do a fuzzy compare. match = self.match_bitap(text, pattern, loc) return match
[ "def", "match_main", "(", "self", ",", "text", ",", "pattern", ",", "loc", ")", ":", "# Check for null inputs.", "if", "text", "==", "None", "or", "pattern", "==", "None", ":", "raise", "ValueError", "(", "\"Null inputs. (match_main)\"", ")", "loc", "=", "max", "(", "0", ",", "min", "(", "loc", ",", "len", "(", "text", ")", ")", ")", "if", "text", "==", "pattern", ":", "# Shortcut (potentially not guaranteed by the algorithm)", "return", "0", "elif", "not", "text", ":", "# Nothing to match.", "return", "-", "1", "elif", "text", "[", "loc", ":", "loc", "+", "len", "(", "pattern", ")", "]", "==", "pattern", ":", "# Perfect match at the perfect spot! (Includes case of null pattern)", "return", "loc", "else", ":", "# Do a fuzzy compare.", "match", "=", "self", ".", "match_bitap", "(", "text", ",", "pattern", ",", "loc", ")", "return", "match" ]
28.724138
17.241379
def _validate_label(cls, name, value): """Raise ValueError if the label is invalid.""" # Rules for labels are described in: # https://cloud.google.com/compute/docs/labeling-resources#restrictions # * Keys and values cannot be longer than 63 characters each. # * Keys and values can only contain lowercase letters, numeric characters, # underscores, and dashes. # * International characters are allowed. # * Label keys must start with a lowercase letter and international # characters are allowed. # * Label keys cannot be empty. cls._check_label_name(name) cls._check_label_value(value) # Ensure that reserved labels are not being used. if not cls._allow_reserved_keys and name in RESERVED_LABELS: raise ValueError('Label flag (%s=...) must not use reserved keys: %r' % (name, list(RESERVED_LABELS)))
[ "def", "_validate_label", "(", "cls", ",", "name", ",", "value", ")", ":", "# Rules for labels are described in:", "# https://cloud.google.com/compute/docs/labeling-resources#restrictions", "# * Keys and values cannot be longer than 63 characters each.", "# * Keys and values can only contain lowercase letters, numeric characters,", "# underscores, and dashes.", "# * International characters are allowed.", "# * Label keys must start with a lowercase letter and international", "# characters are allowed.", "# * Label keys cannot be empty.", "cls", ".", "_check_label_name", "(", "name", ")", "cls", ".", "_check_label_value", "(", "value", ")", "# Ensure that reserved labels are not being used.", "if", "not", "cls", ".", "_allow_reserved_keys", "and", "name", "in", "RESERVED_LABELS", ":", "raise", "ValueError", "(", "'Label flag (%s=...) must not use reserved keys: %r'", "%", "(", "name", ",", "list", "(", "RESERVED_LABELS", ")", ")", ")" ]
46
18.052632
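The validation itself is delegated to `_check_label_name` and `_check_label_value`, which are not shown. As a rough illustration only, the commented key rules (lowercase start, 63-character cap, lowercase letters, digits, underscores, dashes) can be restated as a regex, ignoring the international-character allowance for brevity:

import re

# Illustration only; the record's real checks live in _check_label_name.
LABEL_KEY = re.compile(r'^[a-z][a-z0-9_-]{0,62}$')

for key in ('job-name', 'Bad-Key', 'a' * 64, ''):
    print(repr(key[:12]), bool(LABEL_KEY.match(key)))
# 'job-name' True, 'Bad-Key' False, 'aaaaaaaaaaaa' False, '' False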
def fn_signature(callable, argument_transform=(lambda name: name), default_transform=(lambda name, value: "%s=%s" % (name, repr(value))), vararg_transform=(lambda name: "*" + name), kwargs_transform=(lambda name: "**" + name)): """Returns the signature of the provided callable as a tuple of strings.""" signature = [] fn = get_fn(callable) avail_ac = fn_available_argcount(fn) kwargs = fn_kwargs(fn) argnames = fn_argnames(fn) for name in stop_at(argnames, avail_ac): if name in kwargs: signature.append(default_transform(name, kwargs[name])) else: signature.append(argument_transform(name)) if fn_has_args(fn): if fn_has_kwargs(fn): signature.append(vararg_transform(argnames[-2])) signature.append(kwargs_transform(argnames[-1])) else: signature.append(vararg_transform(argnames[-1])) elif fn_has_kwargs(fn): signature.append(kwargs_transform(argnames[-1])) return signature
[ "def", "fn_signature", "(", "callable", ",", "argument_transform", "=", "(", "lambda", "name", ":", "name", ")", ",", "default_transform", "=", "(", "lambda", "name", ",", "value", ":", "\"%s=%s\"", "%", "(", "name", ",", "repr", "(", "value", ")", ")", ")", ",", "vararg_transform", "=", "(", "lambda", "name", ":", "\"*\"", "+", "name", ")", ",", "kwargs_transform", "=", "(", "lambda", "name", ":", "\"**\"", "+", "name", ")", ")", ":", "signature", "=", "[", "]", "fn", "=", "get_fn", "(", "callable", ")", "avail_ac", "=", "fn_available_argcount", "(", "fn", ")", "kwargs", "=", "fn_kwargs", "(", "fn", ")", "argnames", "=", "fn_argnames", "(", "fn", ")", "for", "name", "in", "stop_at", "(", "argnames", ",", "avail_ac", ")", ":", "if", "name", "in", "kwargs", ":", "signature", ".", "append", "(", "default_transform", "(", "name", ",", "kwargs", "[", "name", "]", ")", ")", "else", ":", "signature", ".", "append", "(", "argument_transform", "(", "name", ")", ")", "if", "fn_has_args", "(", "fn", ")", ":", "if", "fn_has_kwargs", "(", "fn", ")", ":", "signature", ".", "append", "(", "vararg_transform", "(", "argnames", "[", "-", "2", "]", ")", ")", "signature", ".", "append", "(", "kwargs_transform", "(", "argnames", "[", "-", "1", "]", ")", ")", "else", ":", "signature", ".", "append", "(", "vararg_transform", "(", "argnames", "[", "-", "1", "]", ")", ")", "elif", "fn_has_kwargs", "(", "fn", ")", ":", "signature", ".", "append", "(", "kwargs_transform", "(", "argnames", "[", "-", "1", "]", ")", ")", "return", "signature" ]
41.407407
17.962963
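The helpers this relies on (`get_fn`, `fn_argnames`, `fn_kwargs`, and the rest) are defined elsewhere, but the shape of the result can be reproduced with the standard `inspect` module. This is an equivalent sketch, not the record's implementation:

import inspect

def signature_strings(fn):
    out = []
    for p in inspect.signature(fn).parameters.values():
        if p.kind is p.VAR_POSITIONAL:
            out.append('*' + p.name)          # vararg_transform
        elif p.kind is p.VAR_KEYWORD:
            out.append('**' + p.name)         # kwargs_transform
        elif p.default is not p.empty:
            out.append('%s=%s' % (p.name, repr(p.default)))  # default_transform
        else:
            out.append(p.name)                # argument_transform
    return out

def example(a, b=1, *args, **kwargs):
    pass

print(signature_strings(example))  # ['a', 'b=1', '*args', '**kwargs']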
def labels(self, pores=[], throats=[], element=None, mode='union'): r""" Returns a list of labels present on the object Additionally, this function can return labels applied to a specified set of pores or throats Parameters ---------- element : string Controls whether pore or throat labels are returned. If empty then both are returned (default). pores (or throats) : array_like The pores (or throats) whose labels are sought. If left empty a list containing all pore and throat labels is returned. mode : string, optional Controls how the query should be performed. Only applicable when ``pores`` or ``throats`` are specified: **'or', 'union', 'any'**: (default) Returns the labels that are assigned to *any* of the given locations. **'and', 'intersection', 'all'**: Labels that are present on *all* the given locations. **'xor', 'exclusive_or'** : Labels that are present on *only one* of the given locations. **'nor', 'none', 'not'**: Labels that are *not* present on any of the given locations. **'nand'**: Labels that are present on *all but one* of the given locations **'xnor'**: Labels that are present on *more than one* of the given locations. 'nxor' is also accepted. Returns ------- A list containing the labels on the object. If ``pores`` or ``throats`` are given, the results are filtered according to the specified ``mode``. See Also -------- props keys Notes ----- Technically, *'nand'* and *'xnor'* should also return pores with *none* of the labels but these are not included. This makes the returned list more useful. Examples -------- >>> import openpnm as op >>> pn = op.network.Cubic(shape=[5, 5, 5]) >>> pn.labels(pores=[11, 12]) ['pore.all', 'pore.front', 'pore.internal', 'pore.surface'] """ # Short-circuit query when no pores or throats are given if (sp.size(pores) == 0) and (sp.size(throats) == 0): labels = PrintableList(self.keys(element=element, mode='labels')) elif (sp.size(pores) > 0) and (sp.size(throats) > 0): raise Exception('Cannot perform label query on pores and ' + 'throats simultaneously') elif sp.size(pores) > 0: labels = self._get_labels(element='pore', locations=pores, mode=mode) elif sp.size(throats) > 0: labels = self._get_labels(element='throat', locations=throats, mode=mode) return labels
[ "def", "labels", "(", "self", ",", "pores", "=", "[", "]", ",", "throats", "=", "[", "]", ",", "element", "=", "None", ",", "mode", "=", "'union'", ")", ":", "# Short-circuit query when no pores or throats are given", "if", "(", "sp", ".", "size", "(", "pores", ")", "==", "0", ")", "and", "(", "sp", ".", "size", "(", "throats", ")", "==", "0", ")", ":", "labels", "=", "PrintableList", "(", "self", ".", "keys", "(", "element", "=", "element", ",", "mode", "=", "'labels'", ")", ")", "elif", "(", "sp", ".", "size", "(", "pores", ")", ">", "0", ")", "and", "(", "sp", ".", "size", "(", "throats", ")", ">", "0", ")", ":", "raise", "Exception", "(", "'Cannot perform label query on pores and '", "+", "'throats simultaneously'", ")", "elif", "sp", ".", "size", "(", "pores", ")", ">", "0", ":", "labels", "=", "self", ".", "_get_labels", "(", "element", "=", "'pore'", ",", "locations", "=", "pores", ",", "mode", "=", "mode", ")", "elif", "sp", ".", "size", "(", "throats", ")", ">", "0", ":", "labels", "=", "self", ".", "_get_labels", "(", "element", "=", "'throat'", ",", "locations", "=", "throats", ",", "mode", "=", "mode", ")", "return", "labels" ]
37.447368
24.802632
def get_state(self, key):
        """
        Get the state connected to a given key.

        :param key: Key into the state database
        :return: A :py:class:`oidcservice.state_interface.State` instance
        """
        _data = self.state_db.get(key)
        if not _data:
            raise KeyError(key)
        else:
            return State().from_json(_data)
[ "def", "get_state", "(", "self", ",", "key", ")", ":", "_data", "=", "self", ".", "state_db", ".", "get", "(", "key", ")", "if", "not", "_data", ":", "raise", "KeyError", "(", "key", ")", "else", ":", "return", "State", "(", ")", ".", "from_json", "(", "_data", ")" ]
30
13.5
def off(self, event, handler=None): """Remove an event or a handler from it.""" if handler: self.events[event].off(handler) else: del self.events[event] delattr(self, event)
[ "def", "off", "(", "self", ",", "event", ",", "handler", "=", "None", ")", ":", "if", "handler", ":", "self", ".", "events", "[", "event", "]", ".", "off", "(", "handler", ")", "else", ":", "del", "self", ".", "events", "[", "event", "]", "delattr", "(", "self", ",", "event", ")" ]
32.428571
10
def ignore_nan_inf(kde_method): """Ignores nans and infs from the input data Invalid positions in the resulting density are set to nan. """ def new_kde_method(events_x, events_y, xout=None, yout=None, *args, **kwargs): bad_in = get_bad_vals(events_x, events_y) if xout is None: density = np.zeros_like(events_x, dtype=float) bad_out = bad_in xo = yo = None else: density = np.zeros_like(xout, dtype=float) bad_out = get_bad_vals(xout, yout) xo = xout[~bad_out] yo = yout[~bad_out] # Filter events ev_x = events_x[~bad_in] ev_y = events_y[~bad_in] density[~bad_out] = kde_method(ev_x, ev_y, xo, yo, *args, **kwargs) density[bad_out] = np.nan return density doc_add = "\n Notes\n" +\ " -----\n" +\ " This is a wrapped version that ignores nan and inf values." new_kde_method.__doc__ = kde_method.__doc__ + doc_add return new_kde_method
[ "def", "ignore_nan_inf", "(", "kde_method", ")", ":", "def", "new_kde_method", "(", "events_x", ",", "events_y", ",", "xout", "=", "None", ",", "yout", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "bad_in", "=", "get_bad_vals", "(", "events_x", ",", "events_y", ")", "if", "xout", "is", "None", ":", "density", "=", "np", ".", "zeros_like", "(", "events_x", ",", "dtype", "=", "float", ")", "bad_out", "=", "bad_in", "xo", "=", "yo", "=", "None", "else", ":", "density", "=", "np", ".", "zeros_like", "(", "xout", ",", "dtype", "=", "float", ")", "bad_out", "=", "get_bad_vals", "(", "xout", ",", "yout", ")", "xo", "=", "xout", "[", "~", "bad_out", "]", "yo", "=", "yout", "[", "~", "bad_out", "]", "# Filter events", "ev_x", "=", "events_x", "[", "~", "bad_in", "]", "ev_y", "=", "events_y", "[", "~", "bad_in", "]", "density", "[", "~", "bad_out", "]", "=", "kde_method", "(", "ev_x", ",", "ev_y", ",", "xo", ",", "yo", ",", "*", "args", ",", "*", "*", "kwargs", ")", "density", "[", "bad_out", "]", "=", "np", ".", "nan", "return", "density", "doc_add", "=", "\"\\n Notes\\n\"", "+", "\" -----\\n\"", "+", "\" This is a wrapped version that ignores nan and inf values.\"", "new_kde_method", ".", "__doc__", "=", "kde_method", ".", "__doc__", "+", "doc_add", "return", "new_kde_method" ]
35.21875
15.1875
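A usage sketch of the decorator above on a toy estimator. `get_bad_vals` is not shown in the record; the definition below is an assumption that mirrors the docstring (flag positions where either coordinate is nan or inf), and `ignore_nan_inf` is taken to be in scope from the record:

import numpy as np

def get_bad_vals(x, y):
    # Assumed definition: flag positions where either coordinate is not finite.
    return ~(np.isfinite(x) & np.isfinite(y))

@ignore_nan_inf  # decorator from the record above
def constant_kde(events_x, events_y, xout=None, yout=None):
    # Toy estimator: density 1.0 everywhere.
    return np.ones_like(events_x if xout is None else xout, dtype=float)

x = np.array([0.0, np.nan, 2.0])
y = np.array([1.0, 1.0, np.inf])
print(constant_kde(x, y))  # [ 1. nan nan] -- invalid positions become nan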
def _ProduceContent(self, mods, showprivate=False, showinh=False):
        """An internal helper to create pages for several modules that do not have nested modules. This will automatically generate the needed RST to document each module and save the module to its own page appropriately.

        Args:
            mods (module): The modules to document that do not contain nested modules
            showprivate (bool): A flag for whether or not to display private members

        Returns:
            str: The file names ready to be appended to a toctree
        """
        result = ''
        nestedresult = ''
        # For each module
        for mod in mods:
            # Test to see if module to document has an __all__ variable
            try:
                all = mod[1].__all__
            except AttributeError:
                raise RuntimeError('Module (%s) MUST have `__all__` defined.' % mod[1].__name__)
            if not showprivate and mod[0][0:1] == '_':
                continue
            if mod[0][0:2] == '__': #and not showprivate
                continue
            result += self._ProduceSingleContent(mod, showprivate, showinh)
        return result
[ "def", "_ProduceContent", "(", "self", ",", "mods", ",", "showprivate", "=", "False", ",", "showinh", "=", "False", ")", ":", "result", "=", "''", "nestedresult", "=", "''", "# For each module", "for", "mod", "in", "mods", ":", "# Test to see if module to document has an __all__ variable", "try", ":", "all", "=", "mod", "[", "1", "]", ".", "__all__", "except", "AttributeError", ":", "raise", "RuntimeError", "(", "'Module (%s) MUST have `__all__` defined.'", "%", "mod", "[", "1", "]", ".", "__name__", ")", "if", "not", "showprivate", "and", "mod", "[", "0", "]", "[", "0", ":", "1", "]", "==", "'_'", ":", "continue", "if", "mod", "[", "0", "]", "[", "0", ":", "2", "]", "==", "'__'", ":", "#and not showprivate", "continue", "result", "+=", "self", ".", "_ProduceSingleContent", "(", "mod", ",", "showprivate", ",", "showinh", ")", "return", "result" ]
42.214286
24.25
def main(): """Parse parameters and run ms bot framework""" args = parser.parse_args() run_ms_bot_framework_server(agent_generator=make_agent, app_id=args.ms_id, app_secret=args.ms_secret, stateful=True)
[ "def", "main", "(", ")", ":", "args", "=", "parser", ".", "parse_args", "(", ")", "run_ms_bot_framework_server", "(", "agent_generator", "=", "make_agent", ",", "app_id", "=", "args", ".", "ms_id", ",", "app_secret", "=", "args", ".", "ms_secret", ",", "stateful", "=", "True", ")" ]
38.125
16.5
def set_geometry(self, crect):
        """Set geometry for floating panels.

        Normally you don't need to override this method; you should
        override `geometry` instead.
        """
        x0, y0, width, height = self.geometry()
        if width is None:
            width = crect.width()
        if height is None:
            height = crect.height()
        # Calculate editor coordinates with their offsets
        offset = self.editor.contentOffset()
        x = self.editor.blockBoundingGeometry(self.editor.firstVisibleBlock())\
            .translated(offset.x(), offset.y()).left() \
            + self.editor.document().documentMargin() \
            + self.editor.panels.margin_size(Panel.Position.LEFT)
        y = crect.top() + self.editor.panels.margin_size(Panel.Position.TOP)
        self.setGeometry(QRect(x+x0, y+y0, width, height))
[ "def", "set_geometry", "(", "self", ",", "crect", ")", ":", "x0", ",", "y0", ",", "width", ",", "height", "=", "self", ".", "geometry", "(", ")", "if", "width", "is", "None", ":", "width", "=", "crect", ".", "width", "(", ")", "if", "height", "is", "None", ":", "height", "=", "crect", ".", "height", "(", ")", "# Calculate editor coordinates with their offsets", "offset", "=", "self", ".", "editor", ".", "contentOffset", "(", ")", "x", "=", "self", ".", "editor", ".", "blockBoundingGeometry", "(", "self", ".", "editor", ".", "firstVisibleBlock", "(", ")", ")", ".", "translated", "(", "offset", ".", "x", "(", ")", ",", "offset", ".", "y", "(", ")", ")", ".", "left", "(", ")", "+", "self", ".", "editor", ".", "document", "(", ")", ".", "documentMargin", "(", ")", "+", "self", ".", "editor", ".", "panels", ".", "margin_size", "(", "Panel", ".", "Position", ".", "LEFT", ")", "y", "=", "crect", ".", "top", "(", ")", "+", "self", ".", "editor", ".", "panels", ".", "margin_size", "(", "Panel", ".", "Position", ".", "TOP", ")", "self", ".", "setGeometry", "(", "QRect", "(", "x", "+", "x0", ",", "y", "+", "y0", ",", "width", ",", "height", ")", ")" ]
38.363636
19.863636
def json_int_dttm_ser(obj): """json serializer that deals with dates""" val = base_json_conv(obj) if val is not None: return val if isinstance(obj, (datetime, pd.Timestamp)): obj = datetime_to_epoch(obj) elif isinstance(obj, date): obj = (obj - EPOCH.date()).total_seconds() * 1000 else: raise TypeError( 'Unserializable object {} of type {}'.format(obj, type(obj))) return obj
[ "def", "json_int_dttm_ser", "(", "obj", ")", ":", "val", "=", "base_json_conv", "(", "obj", ")", "if", "val", "is", "not", "None", ":", "return", "val", "if", "isinstance", "(", "obj", ",", "(", "datetime", ",", "pd", ".", "Timestamp", ")", ")", ":", "obj", "=", "datetime_to_epoch", "(", "obj", ")", "elif", "isinstance", "(", "obj", ",", "date", ")", ":", "obj", "=", "(", "obj", "-", "EPOCH", ".", "date", "(", ")", ")", ".", "total_seconds", "(", ")", "*", "1000", "else", ":", "raise", "TypeError", "(", "'Unserializable object {} of type {}'", ".", "format", "(", "obj", ",", "type", "(", "obj", ")", ")", ")", "return", "obj" ]
33.615385
16
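A usage sketch: this serializer is designed to plug into `json.dumps` via its `default` hook. It assumes the record's other helpers (`base_json_conv`, `datetime_to_epoch`, `EPOCH`) are in scope from the same module, and the commented output assumes `datetime_to_epoch` returns epoch milliseconds:

import json
from datetime import date, datetime

payload = {'ts': datetime(1970, 1, 2), 'day': date(1970, 1, 3)}
print(json.dumps(payload, default=json_int_dttm_ser, sort_keys=True))
# e.g. {"day": 172800000.0, "ts": 86400000.0} -- dates become epoch milliseconds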
def available_configuration_files(self): """A list of strings with the absolute pathnames of the available configuration files.""" known_files = [GLOBAL_CONFIG, LOCAL_CONFIG, self.environment.get('PIP_ACCEL_CONFIG')] absolute_paths = [parse_path(pathname) for pathname in known_files if pathname] return [pathname for pathname in absolute_paths if os.path.isfile(pathname)]
[ "def", "available_configuration_files", "(", "self", ")", ":", "known_files", "=", "[", "GLOBAL_CONFIG", ",", "LOCAL_CONFIG", ",", "self", ".", "environment", ".", "get", "(", "'PIP_ACCEL_CONFIG'", ")", "]", "absolute_paths", "=", "[", "parse_path", "(", "pathname", ")", "for", "pathname", "in", "known_files", "if", "pathname", "]", "return", "[", "pathname", "for", "pathname", "in", "absolute_paths", "if", "os", ".", "path", ".", "isfile", "(", "pathname", ")", "]" ]
80.2
28.8
def user_auth(
            self,
            cloudflare_email=None,
            cloudflare_pass=None,
            unique_id=None
    ):
        """
        Get user_key based on either their email and password or unique_id.

        :param cloudflare_email: email associated with user
        :type cloudflare_email: str
        :param cloudflare_pass: password associated with user
        :type cloudflare_pass: str
        :param unique_id: unique id associated with user
        :type unique_id: str

        :returns:
        :rtype: dict
        """
        if not (cloudflare_email and cloudflare_pass) and not unique_id:
            raise KeyError(
                'Either cloudflare_email and cloudflare_pass or unique_id must be present')
        params = {'act': 'user_auth'}
        if cloudflare_email and cloudflare_pass:
            params['cloudflare_email'] = cloudflare_email
            params['cloudflare_pass'] = cloudflare_pass
        if unique_id:
            params['unique_id'] = unique_id
        return self._request(params)
[ "def", "user_auth", "(", "self", ",", "cloudflare_email", "=", "None", ",", "cloudflare_pass", "=", "None", ",", "unique_id", "=", "None", ")", ":", "if", "not", "(", "cloudflare_email", "and", "cloudflare_pass", ")", "and", "not", "unique_id", ":", "raise", "KeyError", "(", "'Either cloudflare_email and cloudflare_pass or unique_id must be present'", ")", "params", "=", "{", "'act'", ":", "'user_auth'", "}", "if", "cloudflare_email", "and", "cloudflare_pass", ":", "params", "[", "'cloudflare_email'", "]", "=", "cloudflare_email", "params", "[", "'cloudflare_pass'", "]", "=", "cloudflare_pass", "if", "unique_id", ":", "params", "[", "'unique_id'", "]", "=", "unique_id", "return", "self", ".", "_request", "(", "params", ")" ]
34.9
18.3
def populate_from_local(self, sa, container, path, mode, cache_control): # type: (StorageEntity, blobxfer.operations.azure.StorageAccount # str, str, blobxfer.models.azure.StorageModes, str) -> None """Populate properties from local :param StorageEntity self: this :param blobxfer.operations.azure.StorageAccount sa: storage account :param str container: container :param str path: full path to file :param blobxfer.models.azure.StorageModes mode: storage mode :param str cache_control: cache control """ self._can_create_containers = sa.can_create_containers self._container = container self._name = path self._mode = mode self._cache_control = cache_control self._from_local = True if mode == StorageModes.Append: self._client = sa.append_blob_client elif mode == StorageModes.Block: self._client = sa.block_blob_client elif mode == StorageModes.File: self._client = sa.file_client elif mode == StorageModes.Page: self._client = sa.page_blob_client elif mode == StorageModes.Auto: name = self.name.lower() if name.endswith('.vhd') or name.endswith('.vhdx'): self._client = sa.page_blob_client self._mode = StorageModes.Page else: self._client = sa.block_blob_client self._mode = StorageModes.Block
[ "def", "populate_from_local", "(", "self", ",", "sa", ",", "container", ",", "path", ",", "mode", ",", "cache_control", ")", ":", "# type: (StorageEntity, blobxfer.operations.azure.StorageAccount", "# str, str, blobxfer.models.azure.StorageModes, str) -> None", "self", ".", "_can_create_containers", "=", "sa", ".", "can_create_containers", "self", ".", "_container", "=", "container", "self", ".", "_name", "=", "path", "self", ".", "_mode", "=", "mode", "self", ".", "_cache_control", "=", "cache_control", "self", ".", "_from_local", "=", "True", "if", "mode", "==", "StorageModes", ".", "Append", ":", "self", ".", "_client", "=", "sa", ".", "append_blob_client", "elif", "mode", "==", "StorageModes", ".", "Block", ":", "self", ".", "_client", "=", "sa", ".", "block_blob_client", "elif", "mode", "==", "StorageModes", ".", "File", ":", "self", ".", "_client", "=", "sa", ".", "file_client", "elif", "mode", "==", "StorageModes", ".", "Page", ":", "self", ".", "_client", "=", "sa", ".", "page_blob_client", "elif", "mode", "==", "StorageModes", ".", "Auto", ":", "name", "=", "self", ".", "name", ".", "lower", "(", ")", "if", "name", ".", "endswith", "(", "'.vhd'", ")", "or", "name", ".", "endswith", "(", "'.vhdx'", ")", ":", "self", ".", "_client", "=", "sa", ".", "page_blob_client", "self", ".", "_mode", "=", "StorageModes", ".", "Page", "else", ":", "self", ".", "_client", "=", "sa", ".", "block_blob_client", "self", ".", "_mode", "=", "StorageModes", ".", "Block" ]
45.151515
10.666667
def FromBinary(cls, record_data, record_count=1):
        """Create an UpdateRecord subclass from binary record data.

        This should be called with a binary record blob (NOT including the
        record type header) and it will decode it into a SendRPCRecord.

        Args:
            record_data (bytearray): The raw record data that we wish to parse
                into an UpdateRecord subclass NOT including its 8 byte record header.
            record_count (int): The number of records included in record_data.

        Raises:
            ArgumentError: If the record_data is malformed and cannot be parsed.

        Returns:
            SendRPCRecord: The decoded send RPC record.
        """

        cmd, address, resp_length, payload = cls._parse_rpc_info(record_data)

        # The first bit is 1 if we do not have a fixed length
        # The next 7 bits encode the fixed length if we do have a fixed length
        fixed_length = resp_length >> 1
        if resp_length & 0b1:
            fixed_length = None

        return cls(address, cmd, payload, fixed_length)
[ "def", "FromBinary", "(", "cls", ",", "record_data", ",", "record_count", "=", "1", ")", ":", "cmd", ",", "address", ",", "resp_length", ",", "payload", "=", "cls", ".", "_parse_rpc_info", "(", "record_data", ")", "# The first bit is 1 if we do not have a fixed length", "# The next 7 bits encode the fixed length if we do have a fixed length", "fixed_length", "=", "resp_length", ">>", "1", "if", "resp_length", "&", "0b1", ":", "fixed_length", "=", "None", "return", "cls", "(", "address", ",", "cmd", ",", "payload", ",", "fixed_length", ")" ]
39.481481
27.481481
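A worked example of the `resp_length` encoding the comments describe: the low bit flags a variable-length response, and the upper seven bits carry the fixed length:

# bit 0 set  -> variable-length response (fixed_length becomes None)
# bits 1..7  -> the fixed length itself
for resp_length in (0b1000, 0b1001):
    fixed_length = resp_length >> 1
    if resp_length & 0b1:
        fixed_length = None
    print(bin(resp_length), '->', fixed_length)
# 0b1000 -> 4
# 0b1001 -> None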
def get_glance_url(url_base, tenant_id, user, password, region):
    """Get the glance URL.
    :param url_base: keystone url
    :param tenant_id: the id of the tenant
    :param user: the user
    :param password: the password
    """
    get_url(url_base, tenant_id, user, password, 'image', region)
[ "def", "get_glance_url", "(", "url_base", ",", "tenant_id", ",", "user", ",", "password", ",", "region", ")", ":", "get_url", "(", "url_base", ",", "tenant_id", ",", "user", ",", "password", ",", "'image'", ",", "region", ")" ]
37.25
9.875
def read_to_file(library, session, filename, count): """Read data synchronously, and store the transferred data in a file. Corresponds to viReadToFile function of the VISA library. :param library: the visa library wrapped by ctypes. :param session: Unique logical identifier to a session. :param filename: Name of file to which data will be written. :param count: Number of bytes to be read. :return: Number of bytes actually transferred, return value of the library call. :rtype: int, :class:`pyvisa.constants.StatusCode` """ return_count = ViUInt32() ret = library.viReadToFile(session, filename, count, return_count) return return_count, ret
[ "def", "read_to_file", "(", "library", ",", "session", ",", "filename", ",", "count", ")", ":", "return_count", "=", "ViUInt32", "(", ")", "ret", "=", "library", ".", "viReadToFile", "(", "session", ",", "filename", ",", "count", ",", "return_count", ")", "return", "return_count", ",", "ret" ]
45.333333
19.066667
def set_option(name, option): """ Set the given LLVM "command-line" option. For example set_option("test", "-debug-pass=Structure") would display all optimization passes when generating code. """ ffi.lib.LLVMPY_SetCommandLine(_encode_string(name), _encode_string(option))
[ "def", "set_option", "(", "name", ",", "option", ")", ":", "ffi", ".", "lib", ".", "LLVMPY_SetCommandLine", "(", "_encode_string", "(", "name", ")", ",", "_encode_string", "(", "option", ")", ")" ]
35.777778
14.444444
def find_elb_dns_zone_id(name='', env='dev', region='us-east-1'): """Get an application's AWS elb dns zone id. Args: name (str): ELB name env (str): Environment/account of ELB region (str): AWS Region Returns: str: elb DNS zone ID """ LOG.info('Find %s ELB DNS Zone ID in %s [%s].', name, env, region) client = boto3.Session(profile_name=env).client('elb', region_name=region) elbs = client.describe_load_balancers(LoadBalancerNames=[name]) return elbs['LoadBalancerDescriptions'][0]['CanonicalHostedZoneNameID']
[ "def", "find_elb_dns_zone_id", "(", "name", "=", "''", ",", "env", "=", "'dev'", ",", "region", "=", "'us-east-1'", ")", ":", "LOG", ".", "info", "(", "'Find %s ELB DNS Zone ID in %s [%s].'", ",", "name", ",", "env", ",", "region", ")", "client", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ")", ".", "client", "(", "'elb'", ",", "region_name", "=", "region", ")", "elbs", "=", "client", ".", "describe_load_balancers", "(", "LoadBalancerNames", "=", "[", "name", "]", ")", "return", "elbs", "[", "'LoadBalancerDescriptions'", "]", "[", "0", "]", "[", "'CanonicalHostedZoneNameID'", "]" ]
35.25
23.1875
def _parseCounters(self, data): """Parse simple stats list of key, value pairs. @param data: Multiline data with one key-value pair in each line. @return: Dictionary of stats. """ info_dict = util.NestedDict() for line in data.splitlines(): mobj = re.match('^\s*([\w\.]+)\s*=\s*(\S.*)$', line) if mobj: (key, value) = mobj.groups() klist = key.split('.') info_dict.set_nested(klist, parse_value(value)) return info_dict
[ "def", "_parseCounters", "(", "self", ",", "data", ")", ":", "info_dict", "=", "util", ".", "NestedDict", "(", ")", "for", "line", "in", "data", ".", "splitlines", "(", ")", ":", "mobj", "=", "re", ".", "match", "(", "'^\\s*([\\w\\.]+)\\s*=\\s*(\\S.*)$'", ",", "line", ")", "if", "mobj", ":", "(", "key", ",", "value", ")", "=", "mobj", ".", "groups", "(", ")", "klist", "=", "key", ".", "split", "(", "'.'", ")", "info_dict", ".", "set_nested", "(", "klist", ",", "parse_value", "(", "value", ")", ")", "return", "info_dict" ]
37.266667
13.133333
def get_gallery_profile(self): """Return the users gallery profile.""" url = (self._imgur._base_url + "/3/account/{0}/" "gallery_profile".format(self.name)) return self._imgur._send_request(url)
[ "def", "get_gallery_profile", "(", "self", ")", ":", "url", "=", "(", "self", ".", "_imgur", ".", "_base_url", "+", "\"/3/account/{0}/\"", "\"gallery_profile\"", ".", "format", "(", "self", ".", "name", ")", ")", "return", "self", ".", "_imgur", ".", "_send_request", "(", "url", ")" ]
45.8
8.4
def parse_auth_headers(self, authorization):
        """Parses the id and signature from the Authorization header of a request.

        Returns a dict that is accepted by all other API functions which expect
        authorization headers in a dict format.

        Keyword arguments:
        authorization -- The authorization header of any request. The header
                         must be in a format understood by the signer.
        """
        m = re.match(r'^(?i)Acquia\s+(.*?):(.+)$', authorization)
        if m is not None:
            return {"id": m.group(1), "signature": m.group(2)}
        return {}
[ "def", "parse_auth_headers", "(", "self", ",", "authorization", ")", ":", "m", "=", "re", ".", "match", "(", "r'^(?i)Acquia\\s+(.*?):(.+)$'", ",", "authorization", ")", "if", "m", "is", "not", "None", ":", "return", "{", "\"id\"", ":", "m", ".", "group", "(", "1", ")", ",", "\"signature\"", ":", "m", ".", "group", "(", "2", ")", "}", "return", "{", "}" ]
53.272727
27.636364
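A usage sketch of the accepted header shape, `Acquia <id>:<signature>`, matched case-insensitively. `re.IGNORECASE` is used here because recent Python versions reject the record's mid-pattern `(?i)` flag; the credentials are made up:

import re

pattern = re.compile(r'^Acquia\s+(.*?):(.+)$', re.IGNORECASE)
m = pattern.match('acquia my-id:c2lnbmF0dXJl')  # fabricated example header
print({'id': m.group(1), 'signature': m.group(2)} if m else {})
# {'id': 'my-id', 'signature': 'c2lnbmF0dXJl'}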
def _set_action(self, v, load=False): """ Setter method for action, mapped from YANG variable /rule/action (rule-action) If this variable is read-only (config: false) in the source YANG file, then _set_action is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_action() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'accept': {}, u'reject': {}},), is_leaf=True, yang_name="action", rest_name="action", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Action for the command', u'cli-optional-in-sequence': None}}, namespace='urn:brocade.com:mgmt:brocade-aaa', defining_module='brocade-aaa', yang_type='rule-action', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """action must be of a type compatible with rule-action""", 'defined-type': "brocade-aaa:rule-action", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'accept': {}, u'reject': {}},), is_leaf=True, yang_name="action", rest_name="action", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Action for the command', u'cli-optional-in-sequence': None}}, namespace='urn:brocade.com:mgmt:brocade-aaa', defining_module='brocade-aaa', yang_type='rule-action', is_config=True)""", }) self.__action = t if hasattr(self, '_set'): self._set()
[ "def", "_set_action", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "base", "=", "RestrictedClassType", "(", "base_type", "=", "unicode", ",", "restriction_type", "=", "\"dict_key\"", ",", "restriction_arg", "=", "{", "u'accept'", ":", "{", "}", ",", "u'reject'", ":", "{", "}", "}", ",", ")", ",", "is_leaf", "=", "True", ",", "yang_name", "=", "\"action\"", ",", "rest_name", "=", "\"action\"", ",", "parent", "=", "self", ",", "path_helper", "=", "self", ".", "_path_helper", ",", "extmethods", "=", "self", ".", "_extmethods", ",", "register_paths", "=", "True", ",", "extensions", "=", "{", "u'tailf-common'", ":", "{", "u'info'", ":", "u'Action for the command'", ",", "u'cli-optional-in-sequence'", ":", "None", "}", "}", ",", "namespace", "=", "'urn:brocade.com:mgmt:brocade-aaa'", ",", "defining_module", "=", "'brocade-aaa'", ",", "yang_type", "=", "'rule-action'", ",", "is_config", "=", "True", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "ValueError", "(", "{", "'error-string'", ":", "\"\"\"action must be of a type compatible with rule-action\"\"\"", ",", "'defined-type'", ":", "\"brocade-aaa:rule-action\"", ",", "'generated-type'", ":", "\"\"\"YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type=\"dict_key\", restriction_arg={u'accept': {}, u'reject': {}},), is_leaf=True, yang_name=\"action\", rest_name=\"action\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Action for the command', u'cli-optional-in-sequence': None}}, namespace='urn:brocade.com:mgmt:brocade-aaa', defining_module='brocade-aaa', yang_type='rule-action', is_config=True)\"\"\"", ",", "}", ")", "self", ".", "__action", "=", "t", "if", "hasattr", "(", "self", ",", "'_set'", ")", ":", "self", ".", "_set", "(", ")" ]
86.227273
40.318182
def rights(self, form, load):
        '''
        Determine what type of authentication is being requested and pass
        authorization

        Note: this will check that the user has at least one right that will
        let the user execute "load"; it does not deal with conflicting rules
        '''

        adata = self.auth_data
        good = False
        if load.get('token', False):
            for sub_auth in self.token(self.auth_data, load):
                if sub_auth:
                    if self.rights_check(
                            form,
                            self.auth_data[sub_auth['token']['eauth']],
                            sub_auth['token']['name'],
                            load,
                            sub_auth['token']['eauth']):
                        return True
            log.warning(
                'Authentication failure of type "token" occurred.'
            )
        elif load.get('eauth'):
            for sub_auth in self.eauth(self.auth_data, load):
                if sub_auth:
                    if self.rights_check(
                            form,
                            sub_auth['sub_auth'],
                            sub_auth['name'],
                            load,
                            load['eauth']):
                        return True
            log.warning(
                'Authentication failure of type "eauth" occurred.'
            )
        return False
[ "def", "rights", "(", "self", ",", "form", ",", "load", ")", ":", "adata", "=", "self", ".", "auth_data", "good", "=", "False", "if", "load", ".", "get", "(", "'token'", ",", "False", ")", ":", "for", "sub_auth", "in", "self", ".", "token", "(", "self", ".", "auth_data", ",", "load", ")", ":", "if", "sub_auth", ":", "if", "self", ".", "rights_check", "(", "form", ",", "self", ".", "auth_data", "[", "sub_auth", "[", "'token'", "]", "[", "'eauth'", "]", "]", ",", "sub_auth", "[", "'token'", "]", "[", "'name'", "]", ",", "load", ",", "sub_auth", "[", "'token'", "]", "[", "'eauth'", "]", ")", ":", "return", "True", "log", ".", "warning", "(", "'Authentication failure of type \"token\" occurred.'", ")", "elif", "load", ".", "get", "(", "'eauth'", ")", ":", "for", "sub_auth", "in", "self", ".", "eauth", "(", "self", ".", "auth_data", ",", "load", ")", ":", "if", "sub_auth", ":", "if", "self", ".", "rights_check", "(", "form", ",", "sub_auth", "[", "'sub_auth'", "]", ",", "sub_auth", "[", "'name'", "]", ",", "load", ",", "load", "[", "'eauth'", "]", ")", ":", "return", "True", "log", ".", "warning", "(", "'Authentication failure of type \"eauth\" occurred.'", ")", "return", "False" ]
37.421053
17.368421
def _extractAssociation(self, assoc_response, assoc_session): """Attempt to extract an association from the response, given the association response message and the established association session. @param assoc_response: The association response message from the server @type assoc_response: openid.message.Message @param assoc_session: The association session object that was used when making the request @type assoc_session: depends on the session type of the request @raises ProtocolError: when data is malformed @raises KeyError: when a field is missing @rtype: openid.association.Association """ # Extract the common fields from the response, raising an # exception if they are not found assoc_type = assoc_response.getArg( OPENID_NS, 'assoc_type', no_default) assoc_handle = assoc_response.getArg( OPENID_NS, 'assoc_handle', no_default) # expires_in is a base-10 string. The Python parsing will # accept literals that have whitespace around them and will # accept negative values. Neither of these are really in-spec, # but we think it's OK to accept them. expires_in_str = assoc_response.getArg( OPENID_NS, 'expires_in', no_default) try: expires_in = int(expires_in_str) except ValueError, why: raise ProtocolError('Invalid expires_in field: %s' % (why[0],)) # OpenID 1 has funny association session behaviour. if assoc_response.isOpenID1(): session_type = self._getOpenID1SessionType(assoc_response) else: session_type = assoc_response.getArg( OPENID2_NS, 'session_type', no_default) # Session type mismatch if assoc_session.session_type != session_type: if (assoc_response.isOpenID1() and session_type == 'no-encryption'): # In OpenID 1, any association request can result in a # 'no-encryption' association response. Setting # assoc_session to a new no-encryption session should # make the rest of this function work properly for # that case. assoc_session = PlainTextConsumerSession() else: # Any other mismatch, regardless of protocol version # results in the failure of the association session # altogether. fmt = 'Session type mismatch. Expected %r, got %r' message = fmt % (assoc_session.session_type, session_type) raise ProtocolError(message) # Make sure assoc_type is valid for session_type if assoc_type not in assoc_session.allowed_assoc_types: fmt = 'Unsupported assoc_type for session %s returned: %s' raise ProtocolError(fmt % (assoc_session.session_type, assoc_type)) # Delegate to the association session to extract the secret # from the response, however is appropriate for that session # type. try: secret = assoc_session.extractSecret(assoc_response) except ValueError, why: fmt = 'Malformed response for %s session: %s' raise ProtocolError(fmt % (assoc_session.session_type, why[0])) return Association.fromExpiresIn( expires_in, assoc_handle, secret, assoc_type)
[ "def", "_extractAssociation", "(", "self", ",", "assoc_response", ",", "assoc_session", ")", ":", "# Extract the common fields from the response, raising an", "# exception if they are not found", "assoc_type", "=", "assoc_response", ".", "getArg", "(", "OPENID_NS", ",", "'assoc_type'", ",", "no_default", ")", "assoc_handle", "=", "assoc_response", ".", "getArg", "(", "OPENID_NS", ",", "'assoc_handle'", ",", "no_default", ")", "# expires_in is a base-10 string. The Python parsing will", "# accept literals that have whitespace around them and will", "# accept negative values. Neither of these are really in-spec,", "# but we think it's OK to accept them.", "expires_in_str", "=", "assoc_response", ".", "getArg", "(", "OPENID_NS", ",", "'expires_in'", ",", "no_default", ")", "try", ":", "expires_in", "=", "int", "(", "expires_in_str", ")", "except", "ValueError", ",", "why", ":", "raise", "ProtocolError", "(", "'Invalid expires_in field: %s'", "%", "(", "why", "[", "0", "]", ",", ")", ")", "# OpenID 1 has funny association session behaviour.", "if", "assoc_response", ".", "isOpenID1", "(", ")", ":", "session_type", "=", "self", ".", "_getOpenID1SessionType", "(", "assoc_response", ")", "else", ":", "session_type", "=", "assoc_response", ".", "getArg", "(", "OPENID2_NS", ",", "'session_type'", ",", "no_default", ")", "# Session type mismatch", "if", "assoc_session", ".", "session_type", "!=", "session_type", ":", "if", "(", "assoc_response", ".", "isOpenID1", "(", ")", "and", "session_type", "==", "'no-encryption'", ")", ":", "# In OpenID 1, any association request can result in a", "# 'no-encryption' association response. Setting", "# assoc_session to a new no-encryption session should", "# make the rest of this function work properly for", "# that case.", "assoc_session", "=", "PlainTextConsumerSession", "(", ")", "else", ":", "# Any other mismatch, regardless of protocol version", "# results in the failure of the association session", "# altogether.", "fmt", "=", "'Session type mismatch. Expected %r, got %r'", "message", "=", "fmt", "%", "(", "assoc_session", ".", "session_type", ",", "session_type", ")", "raise", "ProtocolError", "(", "message", ")", "# Make sure assoc_type is valid for session_type", "if", "assoc_type", "not", "in", "assoc_session", ".", "allowed_assoc_types", ":", "fmt", "=", "'Unsupported assoc_type for session %s returned: %s'", "raise", "ProtocolError", "(", "fmt", "%", "(", "assoc_session", ".", "session_type", ",", "assoc_type", ")", ")", "# Delegate to the association session to extract the secret", "# from the response, however is appropriate for that session", "# type.", "try", ":", "secret", "=", "assoc_session", ".", "extractSecret", "(", "assoc_response", ")", "except", "ValueError", ",", "why", ":", "fmt", "=", "'Malformed response for %s session: %s'", "raise", "ProtocolError", "(", "fmt", "%", "(", "assoc_session", ".", "session_type", ",", "why", "[", "0", "]", ")", ")", "return", "Association", ".", "fromExpiresIn", "(", "expires_in", ",", "assoc_handle", ",", "secret", ",", "assoc_type", ")" ]
44.597403
20.519481
def get_text_stream(stream="stdout", encoding=None): """Retrieve a unicode stream wrapper around **sys.stdout** or **sys.stderr**. :param str stream: The name of the stream to wrap from the :mod:`sys` module. :param str encoding: An optional encoding to use. :return: A new :class:`~vistir.misc.StreamWrapper` instance around the stream :rtype: `vistir.misc.StreamWrapper` """ stream_map = {"stdin": sys.stdin, "stdout": sys.stdout, "stderr": sys.stderr} if os.name == "nt" or sys.platform.startswith("win"): from ._winconsole import _get_windows_console_stream, _wrap_std_stream else: _get_windows_console_stream = lambda *args: None # noqa _wrap_std_stream = lambda *args: None # noqa if six.PY2 and stream != "stdin": _wrap_std_stream(stream) sys_stream = stream_map[stream] windows_console = _get_windows_console_stream(sys_stream, encoding, None) if windows_console is not None: return windows_console return get_wrapped_stream(sys_stream, encoding)
[ "def", "get_text_stream", "(", "stream", "=", "\"stdout\"", ",", "encoding", "=", "None", ")", ":", "stream_map", "=", "{", "\"stdin\"", ":", "sys", ".", "stdin", ",", "\"stdout\"", ":", "sys", ".", "stdout", ",", "\"stderr\"", ":", "sys", ".", "stderr", "}", "if", "os", ".", "name", "==", "\"nt\"", "or", "sys", ".", "platform", ".", "startswith", "(", "\"win\"", ")", ":", "from", ".", "_winconsole", "import", "_get_windows_console_stream", ",", "_wrap_std_stream", "else", ":", "_get_windows_console_stream", "=", "lambda", "*", "args", ":", "None", "# noqa", "_wrap_std_stream", "=", "lambda", "*", "args", ":", "None", "# noqa", "if", "six", ".", "PY2", "and", "stream", "!=", "\"stdin\"", ":", "_wrap_std_stream", "(", "stream", ")", "sys_stream", "=", "stream_map", "[", "stream", "]", "windows_console", "=", "_get_windows_console_stream", "(", "sys_stream", ",", "encoding", ",", "None", ")", "if", "windows_console", "is", "not", "None", ":", "return", "windows_console", "return", "get_wrapped_stream", "(", "sys_stream", ",", "encoding", ")" ]
43.041667
21.291667
def swap_time_and_batch_axes(inputs): """Swaps time and batch axis (the first two axis).""" transposed_axes = tf.concat([[1, 0], tf.range(2, tf.rank(inputs))], axis=0) return tf.transpose(inputs, transposed_axes)
[ "def", "swap_time_and_batch_axes", "(", "inputs", ")", ":", "transposed_axes", "=", "tf", ".", "concat", "(", "[", "[", "1", ",", "0", "]", ",", "tf", ".", "range", "(", "2", ",", "tf", ".", "rank", "(", "inputs", ")", ")", "]", ",", "axis", "=", "0", ")", "return", "tf", ".", "transpose", "(", "inputs", ",", "transposed_axes", ")" ]
53.75
11.5
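A minimal usage sketch for the helper above (assumes TensorFlow 2.x eager execution; the [time, batch, features] layout and the shapes are illustrative):
import tensorflow as tf

x = tf.zeros([5, 8, 32])             # [time, batch, features]
y = swap_time_and_batch_axes(x)      # helper defined above -> [batch, time, features]
print(y.shape)                       # (8, 5, 32)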
def filter_python_files(files): "Get all python files from the list of files." py_files = [] for f in files: # If we end in .py, or if we don't have an extension and file says that # we are a python script, then add us to the list extension = os.path.splitext(f)[-1] if extension: if extension == '.py': py_files.append(f) elif 'python' in open(f, 'r').readline(): py_files.append(f) elif 'python script' in bash('file {}'.format(f)).value().lower(): py_files.append(f) return py_files
[ "def", "filter_python_files", "(", "files", ")", ":", "py_files", "=", "[", "]", "for", "f", "in", "files", ":", "# If we end in .py, or if we don't have an extension and file says that", "# we are a python script, then add us to the list", "extension", "=", "os", ".", "path", ".", "splitext", "(", "f", ")", "[", "-", "1", "]", "if", "extension", ":", "if", "extension", "==", "'.py'", ":", "py_files", ".", "append", "(", "f", ")", "elif", "'python'", "in", "open", "(", "f", ",", "'r'", ")", ".", "readline", "(", ")", ":", "py_files", ".", "append", "(", "f", ")", "elif", "'python script'", "in", "bash", "(", "'file {}'", ".", "format", "(", "f", ")", ")", ".", "value", "(", ")", ".", "lower", "(", ")", ":", "py_files", ".", "append", "(", "f", ")", "return", "py_files" ]
34.529412
18.647059
def load_module(self, module):
        """
        Load a rules module

        :param module: the module object to scan for rule classes
        :type module: module
        :return: None
        :rtype: None
        """
        # pylint: disable=unused-variable
        for name, obj in inspect.getmembers(module,
                                            lambda member: hasattr(member, '__module__')
                                            and member.__module__ == module.__name__
                                            and inspect.isclass(member)):
            self.load_class(obj)
[ "def", "load_module", "(", "self", ",", "module", ")", ":", "# pylint: disable=unused-variable", "for", "name", ",", "obj", "in", "inspect", ".", "getmembers", "(", "module", ",", "lambda", "member", ":", "hasattr", "(", "member", ",", "'__module__'", ")", "and", "member", ".", "__module__", "==", "module", ".", "__name__", "and", "inspect", ".", "isclass", ")", ":", "self", ".", "load_class", "(", "obj", ")" ]
34.266667
19.066667
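A self-contained check of the predicate used above (with the inspect.isclass(member) call fixed): only classes defined in the inspected module match, while imported names and plain functions are filtered out. The throwaway module is hypothetical.
import inspect
import types

mod = types.ModuleType('rules_mod')
exec("import os\nclass MyRule:\n    pass\n", mod.__dict__)

matches = inspect.getmembers(
    mod,
    lambda member: hasattr(member, '__module__')
    and member.__module__ == mod.__name__
    and inspect.isclass(member))
print(matches)   # [('MyRule', <class 'rules_mod.MyRule'>)]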
def do_translate(parser, token): """ This will mark a string for translation and will translate the string for the current language. Usage:: {% trans "this is a test" %} This will mark the string for translation so it will be pulled out by mark-messages.py into the .po files and will run the string through the translation engine. There is a second form:: {% trans "this is a test" noop %} This will only mark for translation, but will return the string unchanged. Use it when you need to store values into forms that should be translated later on. You can use variables instead of constant strings to translate stuff you marked somewhere else:: {% trans variable %} This will just try to translate the contents of the variable ``variable``. Make sure that the string in there is something that is in the .po file. It is possible to store the translated string into a variable:: {% trans "this is a test" as var %} {{ var }} Contextual translations are also supported:: {% trans "this is a test" context "greeting" %} This is equivalent to calling pgettext instead of (u)gettext. """ bits = token.split_contents() if len(bits) < 2: raise TemplateSyntaxError("'%s' takes at least one argument" % bits[0]) message_string = parser.compile_filter(bits[1]) remaining = bits[2:] noop = False asvar = None message_context = None seen = set() invalid_context = {'as', 'noop'} while remaining: option = remaining.pop(0) if option in seen: raise TemplateSyntaxError( "The '%s' option was specified more than once." % option, ) elif option == 'noop': noop = True elif option == 'context': try: value = remaining.pop(0) except IndexError: msg = "No argument provided to the '%s' tag for the context option." % bits[0] six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2]) if value in invalid_context: raise TemplateSyntaxError( "Invalid argument '%s' provided to the '%s' tag for the context option" % (value, bits[0]), ) message_context = parser.compile_filter(value) elif option == 'as': try: value = remaining.pop(0) except IndexError: msg = "No argument provided to the '%s' tag for the as option." % bits[0] six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2]) asvar = value else: raise TemplateSyntaxError( "Unknown argument for '%s' tag: '%s'. The only options " "available are 'noop', 'context' \"xxx\", and 'as VAR'." % ( bits[0], option, ) ) seen.add(option) if phrase_settings.PHRASE_ENABLED: return PhraseTranslateNode(message_string, noop, asvar, message_context) else: return TranslateNode(message_string, noop, asvar, message_context)
[ "def", "do_translate", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "split_contents", "(", ")", "if", "len", "(", "bits", ")", "<", "2", ":", "raise", "TemplateSyntaxError", "(", "\"'%s' takes at least one argument\"", "%", "bits", "[", "0", "]", ")", "message_string", "=", "parser", ".", "compile_filter", "(", "bits", "[", "1", "]", ")", "remaining", "=", "bits", "[", "2", ":", "]", "noop", "=", "False", "asvar", "=", "None", "message_context", "=", "None", "seen", "=", "set", "(", ")", "invalid_context", "=", "{", "'as'", ",", "'noop'", "}", "while", "remaining", ":", "option", "=", "remaining", ".", "pop", "(", "0", ")", "if", "option", "in", "seen", ":", "raise", "TemplateSyntaxError", "(", "\"The '%s' option was specified more than once.\"", "%", "option", ",", ")", "elif", "option", "==", "'noop'", ":", "noop", "=", "True", "elif", "option", "==", "'context'", ":", "try", ":", "value", "=", "remaining", ".", "pop", "(", "0", ")", "except", "IndexError", ":", "msg", "=", "\"No argument provided to the '%s' tag for the context option.\"", "%", "bits", "[", "0", "]", "six", ".", "reraise", "(", "TemplateSyntaxError", ",", "TemplateSyntaxError", "(", "msg", ")", ",", "sys", ".", "exc_info", "(", ")", "[", "2", "]", ")", "if", "value", "in", "invalid_context", ":", "raise", "TemplateSyntaxError", "(", "\"Invalid argument '%s' provided to the '%s' tag for the context option\"", "%", "(", "value", ",", "bits", "[", "0", "]", ")", ",", ")", "message_context", "=", "parser", ".", "compile_filter", "(", "value", ")", "elif", "option", "==", "'as'", ":", "try", ":", "value", "=", "remaining", ".", "pop", "(", "0", ")", "except", "IndexError", ":", "msg", "=", "\"No argument provided to the '%s' tag for the as option.\"", "%", "bits", "[", "0", "]", "six", ".", "reraise", "(", "TemplateSyntaxError", ",", "TemplateSyntaxError", "(", "msg", ")", ",", "sys", ".", "exc_info", "(", ")", "[", "2", "]", ")", "asvar", "=", "value", "else", ":", "raise", "TemplateSyntaxError", "(", "\"Unknown argument for '%s' tag: '%s'. The only options \"", "\"available are 'noop', 'context' \\\"xxx\\\", and 'as VAR'.\"", "%", "(", "bits", "[", "0", "]", ",", "option", ",", ")", ")", "seen", ".", "add", "(", "option", ")", "if", "phrase_settings", ".", "PHRASE_ENABLED", ":", "return", "PhraseTranslateNode", "(", "message_string", ",", "noop", ",", "asvar", ",", "message_context", ")", "else", ":", "return", "TranslateNode", "(", "message_string", ",", "noop", ",", "asvar", ",", "message_context", ")" ]
40.320513
18.961538
def efficiency(self): """Calculate :ref:`pysynphot-formula-qtlam`. Returns ------- ans : float Bandpass dimensionless efficiency. """ mywaveunits = self.waveunits.name self.convert('angstroms') wave = self.wave thru = self.throughput self.convert(mywaveunits) ans = self.trapezoidIntegration(wave, thru/wave) return ans
[ "def", "efficiency", "(", "self", ")", ":", "mywaveunits", "=", "self", ".", "waveunits", ".", "name", "self", ".", "convert", "(", "'angstroms'", ")", "wave", "=", "self", ".", "wave", "thru", "=", "self", ".", "throughput", "self", ".", "convert", "(", "mywaveunits", ")", "ans", "=", "self", ".", "trapezoidIntegration", "(", "wave", ",", "thru", "/", "wave", ")", "return", "ans" ]
23
18.611111
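The same quantity computed standalone with NumPy, assuming trapezoidIntegration above is a plain trapezoid rule: the dimensionless efficiency is the integral of T(lambda)/lambda over the bandpass. The wavelength grid and throughput curve below are made up.
import numpy as np

wave = np.linspace(3000.0, 9000.0, 601)               # Angstroms (illustrative)
thru = np.exp(-0.5 * ((wave - 6000.0) / 800.0) ** 2)  # illustrative throughput curve
ans = np.trapz(thru / wave, wave)                     # integral of T(lam)/lam dlam
print(ans)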
def search(
        self,
        token: dict = None,
        query: str = "",
        bbox: list = None,
        poly: str = None,
        georel: str = None,
        order_by: str = "_created",
        order_dir: str = "desc",
        page_size: int = 100,
        offset: int = 0,
        share: str = None,
        specific_md: list = [],
        include: list = [],
        whole_share: bool = True,
        check: bool = True,
        augment: bool = False,
        tags_as_dicts: bool = False,
        prot: str = "https",
    ) -> dict:
        """Search within the resources shared to the application.

        It's the main method to use.

        :param str token: API auth token - DEPRECATED: token is now automatically included
        :param str query: search terms and semantic filters. Equivalent of
         **q** parameter in Isogeo API. It could be a simple
         string like *oil* or a tag like *keyword:isogeo:formations*
         or *keyword:inspire-theme:landcover*. The *AND* operator
         is applied when various tags are passed.
        :param list bbox: Bounding box to limit the search.
         Must be a 4 list of coordinates in WGS84 (EPSG 4326).
         Could be associated with *georel*.
        :param str poly: Geographic criteria for the search, in WKT format.
         Could be associated with *georel*.
        :param str georel: geometric operator to apply to the bbox or poly
         parameters. Available values (see: *isogeo.GEORELATIONS*):

          * 'contains',
          * 'disjoint',
          * 'equals',
          * 'intersects' - [APPLIED BY API if NOT SPECIFIED]
          * 'overlaps',
          * 'within'.

        :param str order_by: sorting results. Available values:

          * '_created': metadata creation date [DEFAULT if relevance is null]
          * '_modified': metadata last update
          * 'title': metadata title
          * 'created': data creation date (possibly None)
          * 'modified': data last update date
          * 'relevance': relevance score calculated by API [DEFAULT].

        :param str order_dir: sorting direction. Available values:

          * 'desc': descending
          * 'asc': ascending

        :param int page_size: limits the number of results.
         Useful to paginate results display. Default value: 100.
        :param int offset: offset to start page size
         from a specific results index
        :param str share: share UUID to filter on
        :param list specific_md: list of metadata UUIDs to filter on
        :param list include: subresources that should be returned.
         Must be a list of strings. Available values: *isogeo.SUBRESOURCES*
        :param bool whole_share: option to return all results or only the
         page size. *True* by DEFAULT.
        :param bool check: option to check query parameters and avoid errors.
         *True* by DEFAULT.
        :param bool augment: option to improve API response by adding
         some tags on the fly (like shares_id)
        :param bool tags_as_dicts: option to store tags as key/values by filter.
        :param str prot: https [DEFAULT] or http
         (use it only for dev and tracking needs).
""" # specific resources specific parsing specific_md = checker._check_filter_specific_md(specific_md) # sub resources specific parsing include = checker._check_filter_includes(include) # handling request parameters payload = { "_id": specific_md, "_include": include, "_lang": self.lang, "_limit": page_size, "_offset": offset, "box": bbox, "geo": poly, "rel": georel, "ob": order_by, "od": order_dir, "q": query, "s": share, } if check: checker.check_request_parameters(payload) else: pass # search request search_url = "{}://v1.{}.isogeo.com/resources/search".format(prot, self.api_url) try: search_req = self.get( search_url, headers=self.header, params=payload, proxies=self.proxies, verify=self.ssl, ) except Exception as e: logging.error(e) raise Exception # fast response check checker.check_api_response(search_req) # serializing result into dict and storing resources in variables search_rez = search_req.json() resources_count = search_rez.get("total") # total of metadatas shared # handling Isogeo API pagination # see: http://help.isogeo.com/api/fr/methods/pagination.html if resources_count > page_size and whole_share: # if API returned more than one page of results, let's get the rest! metadatas = [] # a recipient list payload["_limit"] = 100 # now it'll get pages of 100 resources # let's parse pages for idx in range(0, int(ceil(resources_count / 100)) + 1): payload["_offset"] = idx * 100 search_req = self.get( search_url, headers=self.header, params=payload, proxies=self.proxies, verify=self.ssl, ) # storing results by addition metadatas.extend(search_req.json().get("results")) search_rez["results"] = metadatas else: pass # add shares to tags and query if augment: self.add_tags_shares(search_rez.get("tags")) if share: search_rez.get("query")["_shares"] = [share] else: search_rez.get("query")["_shares"] = [] else: pass # store tags in dicts if tags_as_dicts: new_tags = utils.tags_to_dict( tags=search_rez.get("tags"), prev_query=search_rez.get("query") ) # clear search_rez.get("tags").clear() search_rez.get("query").clear() # update search_rez.get("tags").update(new_tags[0]) search_rez.get("query").update(new_tags[1]) else: pass # end of method return search_rez
[ "def", "search", "(", "self", ",", "token", ":", "dict", "=", "None", ",", "query", ":", "str", "=", "\"\"", ",", "bbox", ":", "list", "=", "None", ",", "poly", ":", "str", "=", "None", ",", "georel", ":", "str", "=", "None", ",", "order_by", ":", "str", "=", "\"_created\"", ",", "order_dir", ":", "str", "=", "\"desc\"", ",", "page_size", ":", "int", "=", "100", ",", "offset", ":", "int", "=", "0", ",", "share", ":", "str", "=", "None", ",", "specific_md", ":", "list", "=", "[", "]", ",", "include", ":", "list", "=", "[", "]", ",", "whole_share", ":", "bool", "=", "True", ",", "check", ":", "bool", "=", "True", ",", "augment", ":", "bool", "=", "False", ",", "tags_as_dicts", ":", "bool", "=", "False", ",", "prot", ":", "str", "=", "\"https\"", ",", ")", "->", "dict", ":", "# specific resources specific parsing", "specific_md", "=", "checker", ".", "_check_filter_specific_md", "(", "specific_md", ")", "# sub resources specific parsing", "include", "=", "checker", ".", "_check_filter_includes", "(", "include", ")", "# handling request parameters", "payload", "=", "{", "\"_id\"", ":", "specific_md", ",", "\"_include\"", ":", "include", ",", "\"_lang\"", ":", "self", ".", "lang", ",", "\"_limit\"", ":", "page_size", ",", "\"_offset\"", ":", "offset", ",", "\"box\"", ":", "bbox", ",", "\"geo\"", ":", "poly", ",", "\"rel\"", ":", "georel", ",", "\"ob\"", ":", "order_by", ",", "\"od\"", ":", "order_dir", ",", "\"q\"", ":", "query", ",", "\"s\"", ":", "share", ",", "}", "if", "check", ":", "checker", ".", "check_request_parameters", "(", "payload", ")", "else", ":", "pass", "# search request", "search_url", "=", "\"{}://v1.{}.isogeo.com/resources/search\"", ".", "format", "(", "prot", ",", "self", ".", "api_url", ")", "try", ":", "search_req", "=", "self", ".", "get", "(", "search_url", ",", "headers", "=", "self", ".", "header", ",", "params", "=", "payload", ",", "proxies", "=", "self", ".", "proxies", ",", "verify", "=", "self", ".", "ssl", ",", ")", "except", "Exception", "as", "e", ":", "logging", ".", "error", "(", "e", ")", "raise", "Exception", "# fast response check", "checker", ".", "check_api_response", "(", "search_req", ")", "# serializing result into dict and storing resources in variables", "search_rez", "=", "search_req", ".", "json", "(", ")", "resources_count", "=", "search_rez", ".", "get", "(", "\"total\"", ")", "# total of metadatas shared", "# handling Isogeo API pagination", "# see: http://help.isogeo.com/api/fr/methods/pagination.html", "if", "resources_count", ">", "page_size", "and", "whole_share", ":", "# if API returned more than one page of results, let's get the rest!", "metadatas", "=", "[", "]", "# a recipient list", "payload", "[", "\"_limit\"", "]", "=", "100", "# now it'll get pages of 100 resources", "# let's parse pages", "for", "idx", "in", "range", "(", "0", ",", "int", "(", "ceil", "(", "resources_count", "/", "100", ")", ")", "+", "1", ")", ":", "payload", "[", "\"_offset\"", "]", "=", "idx", "*", "100", "search_req", "=", "self", ".", "get", "(", "search_url", ",", "headers", "=", "self", ".", "header", ",", "params", "=", "payload", ",", "proxies", "=", "self", ".", "proxies", ",", "verify", "=", "self", ".", "ssl", ",", ")", "# storing results by addition", "metadatas", ".", "extend", "(", "search_req", ".", "json", "(", ")", ".", "get", "(", "\"results\"", ")", ")", "search_rez", "[", "\"results\"", "]", "=", "metadatas", "else", ":", "pass", "# add shares to tags and query", "if", "augment", ":", "self", ".", "add_tags_shares", "(", "search_rez", 
".", "get", "(", "\"tags\"", ")", ")", "if", "share", ":", "search_rez", ".", "get", "(", "\"query\"", ")", "[", "\"_shares\"", "]", "=", "[", "share", "]", "else", ":", "search_rez", ".", "get", "(", "\"query\"", ")", "[", "\"_shares\"", "]", "=", "[", "]", "else", ":", "pass", "# store tags in dicts", "if", "tags_as_dicts", ":", "new_tags", "=", "utils", ".", "tags_to_dict", "(", "tags", "=", "search_rez", ".", "get", "(", "\"tags\"", ")", ",", "prev_query", "=", "search_rez", ".", "get", "(", "\"query\"", ")", ")", "# clear", "search_rez", ".", "get", "(", "\"tags\"", ")", ".", "clear", "(", ")", "search_rez", ".", "get", "(", "\"query\"", ")", ".", "clear", "(", ")", "# update", "search_rez", ".", "get", "(", "\"tags\"", ")", ".", "update", "(", "new_tags", "[", "0", "]", ")", "search_rez", ".", "get", "(", "\"query\"", ")", ".", "update", "(", "new_tags", "[", "1", "]", ")", "else", ":", "pass", "# end of method", "return", "search_rez" ]
35.418079
18.672316
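The pagination arithmetic behind the whole_share branch, in isolation: with 100-result pages, offsets step by 100 until the reported total is covered (the original defensively requests one extra page; Python 3 true division is assumed here).
from math import ceil

def page_offsets(total, page_size=100):
    # Offsets needed to fetch `total` results in pages of `page_size`.
    return [i * page_size for i in range(int(ceil(total / page_size)))]

print(page_offsets(250))   # [0, 100, 200]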
def compose_MDAL_dic(self, site, point_type, start, end, var, agg, window, aligned, points=None, return_names=False):
        """ Create dictionary for MDAL request.

        Parameters
        ----------
        site : str
            Building name.
        point_type : str
            Type of data, i.e. Green_Button_Meter, Building_Electric_Meter...
        start : str
            Start date - 'YYYY-MM-DDTHH:MM:SSZ'
        end : str
            End date - 'YYYY-MM-DDTHH:MM:SSZ'
        var : str
            Variable - "meter", "weather"...
        agg : str
            Aggregation - MEAN, SUM, RAW...
        window : str
            Size of the moving window.
        aligned : bool
            Whether the returned series should be time-aligned
            (passed through to the MDAL request as 'Aligned').
        points : list, optional
            Currently unused.
        return_names : bool
            Currently unused.

        Returns
        -------
        dict
            MDAL request dictionary, ready to be sent to the MDAL service.
        """

        # Convert time to UTC
        start = self.convert_to_utc(start)
        end = self.convert_to_utc(end)

        request = {}

        # Add Time Details - single set for one or multiple series
        request['Time'] = {
            'Start': start,
            'End': end,
            'Window': window,
            'Aligned': aligned
        }

        # Define Variables
        request["Variables"] = {}
        request['Composition'] = []
        request['Aggregation'] = {}

        if isinstance(point_type, str): # if point_type is a string -> single type of point requested
            request["Variables"][var] = self.compose_BRICK_query(point_type=point_type,site=site) # pass one point type at the time
            request['Composition'] = [var]
            request['Aggregation'][var] = [agg]

        elif isinstance(point_type, list): # loop through all the point_types and create one section of the brick query at the time
            for idx, point in enumerate(point_type):
                request["Variables"][var[idx]] = self.compose_BRICK_query(point_type=point,site=site) # pass one point type at the time
                request['Composition'].append(var[idx])

                if isinstance(agg, str): # if agg is a string -> single type of aggregation requested
                    request['Aggregation'][var[idx]] = [agg]

                elif isinstance(agg, list): # if agg is a list -> expected one agg per point
                    request['Aggregation'][var[idx]] = [agg[idx]]

        return request
[ "def", "compose_MDAL_dic", "(", "self", ",", "site", ",", "point_type", ",", "start", ",", "end", ",", "var", ",", "agg", ",", "window", ",", "aligned", ",", "points", "=", "None", ",", "return_names", "=", "False", ")", ":", "# Convert time to UTC", "start", "=", "self", ".", "convert_to_utc", "(", "start", ")", "end", "=", "self", ".", "convert_to_utc", "(", "end", ")", "request", "=", "{", "}", "# Add Time Details - single set for one or multiple series", "request", "[", "'Time'", "]", "=", "{", "'Start'", ":", "start", ",", "'End'", ":", "end", ",", "'Window'", ":", "window", ",", "'Aligned'", ":", "aligned", "}", "# Define Variables ", "request", "[", "\"Variables\"", "]", "=", "{", "}", "request", "[", "'Composition'", "]", "=", "[", "]", "request", "[", "'Aggregation'", "]", "=", "{", "}", "if", "isinstance", "(", "point_type", ",", "str", ")", ":", "# if point_type is a string -> single type of point requested", "request", "[", "\"Variables\"", "]", "[", "var", "]", "=", "self", ".", "compose_BRICK_query", "(", "point_type", "=", "point_type", ",", "site", "=", "site", ")", "# pass one point type at the time", "request", "[", "'Composition'", "]", "=", "[", "var", "]", "request", "[", "'Aggregation'", "]", "[", "var", "]", "=", "[", "agg", "]", "elif", "isinstance", "(", "point_type", ",", "list", ")", ":", "# loop through all the point_types and create one section of the brick query at the time", "for", "idx", ",", "point", "in", "enumerate", "(", "point_type", ")", ":", "request", "[", "\"Variables\"", "]", "[", "var", "[", "idx", "]", "]", "=", "self", ".", "compose_BRICK_query", "(", "point_type", "=", "point", ",", "site", "=", "site", ")", "# pass one point type at the time", "request", "[", "'Composition'", "]", ".", "append", "(", "var", "[", "idx", "]", ")", "if", "isinstance", "(", "agg", ",", "str", ")", ":", "# if agg is a string -> single type of aggregation requested", "request", "[", "'Aggregation'", "]", "[", "var", "[", "idx", "]", "]", "=", "[", "agg", "]", "elif", "isinstance", "(", "agg", ",", "list", ")", ":", "# if agg is a list -> expected one agg per point", "request", "[", "'Aggregation'", "]", "[", "var", "[", "idx", "]", "]", "=", "[", "agg", "[", "idx", "]", "]", "return", "request" ]
37.41791
22.58209
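For reference, the shape of the request dictionary the method assembles for a single point type; every value below is illustrative, and the placeholder string stands in for the output of compose_BRICK_query.
request = {
    'Time': {
        'Start': '2018-01-01T00:00:00Z',   # already converted to UTC by the method
        'End': '2018-02-01T00:00:00Z',
        'Window': '15min',
        'Aligned': True,
    },
    'Variables': {'meter': '<BRICK query for Green_Button_Meter at site>'},
    'Composition': ['meter'],
    'Aggregation': {'meter': ['MEAN']},
}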
def get(self, *args, **kwargs): """ The base activation logic; subclasses should leave this method alone and implement activate(), which is called from this method. """ extra_context = {} try: activated_user = self.activate(*args, **kwargs) except ActivationError as e: extra_context['activation_error'] = { 'message': e.message, 'code': e.code, 'params': e.params } else: signals.user_activated.send( sender=self.__class__, user=activated_user, request=self.request ) return HttpResponseRedirect( force_text( self.get_success_url(activated_user) ) ) context_data = self.get_context_data() context_data.update(extra_context) return self.render_to_response(context_data)
[ "def", "get", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "extra_context", "=", "{", "}", "try", ":", "activated_user", "=", "self", ".", "activate", "(", "*", "args", ",", "*", "*", "kwargs", ")", "except", "ActivationError", "as", "e", ":", "extra_context", "[", "'activation_error'", "]", "=", "{", "'message'", ":", "e", ".", "message", ",", "'code'", ":", "e", ".", "code", ",", "'params'", ":", "e", ".", "params", "}", "else", ":", "signals", ".", "user_activated", ".", "send", "(", "sender", "=", "self", ".", "__class__", ",", "user", "=", "activated_user", ",", "request", "=", "self", ".", "request", ")", "return", "HttpResponseRedirect", "(", "force_text", "(", "self", ".", "get_success_url", "(", "activated_user", ")", ")", ")", "context_data", "=", "self", ".", "get_context_data", "(", ")", "context_data", ".", "update", "(", "extra_context", ")", "return", "self", ".", "render_to_response", "(", "context_data", ")" ]
32.3
13.7
def add(self, cell): """Append a cell into the stack. Parameters ---------- cell : BaseRNNCell The cell to be appended. During unroll, previous cell's output (or raw inputs if no previous cell) is used as the input to this cell. """ self._cells.append(cell) if self._override_cell_params: assert cell._own_params, \ "Either specify params for SequentialRNNCell " \ "or child cells, not both." cell.params._params.update(self.params._params) self.params._params.update(cell.params._params)
[ "def", "add", "(", "self", ",", "cell", ")", ":", "self", ".", "_cells", ".", "append", "(", "cell", ")", "if", "self", ".", "_override_cell_params", ":", "assert", "cell", ".", "_own_params", ",", "\"Either specify params for SequentialRNNCell \"", "\"or child cells, not both.\"", "cell", ".", "params", ".", "_params", ".", "update", "(", "self", ".", "params", ".", "_params", ")", "self", ".", "params", ".", "_params", ".", "update", "(", "cell", ".", "params", ".", "_params", ")" ]
38.625
16.6875
def get_hosts_info(self): """ Returns a list of dicts with information about the known hosts. The dict-keys are: 'ip', 'name', 'mac', 'status' """ result = [] index = 0 while index < self.host_numbers: host = self.get_generic_host_entry(index) result.append({ 'ip': host['NewIPAddress'], 'name': host['NewHostName'], 'mac': host['NewMACAddress'], 'status': host['NewActive']}) index += 1 return result
[ "def", "get_hosts_info", "(", "self", ")", ":", "result", "=", "[", "]", "index", "=", "0", "while", "index", "<", "self", ".", "host_numbers", ":", "host", "=", "self", ".", "get_generic_host_entry", "(", "index", ")", "result", ".", "append", "(", "{", "'ip'", ":", "host", "[", "'NewIPAddress'", "]", ",", "'name'", ":", "host", "[", "'NewHostName'", "]", ",", "'mac'", ":", "host", "[", "'NewMACAddress'", "]", ",", "'status'", ":", "host", "[", "'NewActive'", "]", "}", ")", "index", "+=", "1", "return", "result" ]
34.375
11.625
def DecimalField(default=NOTHING, required=True, repr=True, cmp=True,
                 key=None):
    """
    Create new decimal field on a model.

    :param default: any decimal value
    :param bool required: whether the field is required; if True, a missing
        value makes the object invalid.
    :param bool repr: whether this field should appear in the object's repr.
    :param bool cmp: whether to include this field in generated comparison methods.
    :param string key: override name of the value when converted to dict.
    """
    default = _init_fields.init_default(required, default, None)
    validator = _init_fields.init_validator(required, Decimal)
    return attrib(default=default, converter=lambda x: Decimal(x), validator=validator,
                  repr=repr, cmp=cmp, metadata=dict(key=key))
[ "def", "DecimalField", "(", "default", "=", "NOTHING", ",", "required", "=", "True", ",", "repr", "=", "True", ",", "cmp", "=", "True", ",", "key", "=", "None", ")", ":", "default", "=", "_init_fields", ".", "init_default", "(", "required", ",", "default", ",", "None", ")", "validator", "=", "_init_fields", ".", "init_validator", "(", "required", ",", "Decimal", ")", "return", "attrib", "(", "default", "=", "default", ",", "converter", "=", "lambda", "x", ":", "Decimal", "(", "x", ")", ",", "validator", "=", "validator", ",", "repr", "=", "repr", ",", "cmp", "=", "cmp", ",", "metadata", "=", "dict", "(", "key", "=", "key", ")", ")" ]
47.875
19
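A minimal attrs-based sketch of what such a field factory provides; Price is a hypothetical model, and the bare converter=Decimal stands in for the required/validator plumbing above.
from decimal import Decimal
import attr

@attr.s
class Price:
    amount = attr.ib(converter=Decimal)   # coerce incoming values to Decimal

print(Price("19.99").amount)   # Decimal('19.99')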
def trap_http_exception(self, e): """Checks if an HTTP exception should be trapped or not. By default this will return `False` for all exceptions except for a bad request key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to `True`. It also returns `True` if ``TRAP_HTTP_EXCEPTIONS`` is set to `True`. This is called for all HTTP exceptions raised by a view function. If it returns `True` for any exception the error handler for this exception is not called and it shows up as regular exception in the traceback. This is helpful for debugging implicitly raised HTTP exceptions. .. versionadded:: 0.8 """ if self.config['TRAP_HTTP_EXCEPTIONS']: return True if self.config['TRAP_BAD_REQUEST_ERRORS']: return isinstance(e, BadRequest) return False
[ "def", "trap_http_exception", "(", "self", ",", "e", ")", ":", "if", "self", ".", "config", "[", "'TRAP_HTTP_EXCEPTIONS'", "]", ":", "return", "True", "if", "self", ".", "config", "[", "'TRAP_BAD_REQUEST_ERRORS'", "]", ":", "return", "isinstance", "(", "e", ",", "BadRequest", ")", "return", "False" ]
45.473684
21.526316
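Both flags are ordinary Flask config keys, so enabling the behavior described above is one line each (a sketch; the app itself is hypothetical):
from flask import Flask

app = Flask(__name__)
# Re-raise BadRequest (e.g. from missing form keys) instead of a rendered 400 page:
app.config['TRAP_BAD_REQUEST_ERRORS'] = True
# Or re-raise every HTTP exception so it shows up as a regular traceback:
app.config['TRAP_HTTP_EXCEPTIONS'] = True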
def search(self, query, indices=None, doc_types=None, model=None, scan=False, headers=None, **query_params): """Execute a search against one or more indices to get the resultset. `query` must be a Search object, a Query object, or a custom dictionary of search parameters using the query DSL to be passed directly. """ if isinstance(query, Search): search = query elif isinstance(query, (Query, dict)): search = Search(query) else: raise InvalidQuery("search() must be supplied with a Search or Query object, or a dict") if scan: query_params.setdefault("search_type", "scan") query_params.setdefault("scroll", "10m") return ResultSet(self, search, indices=indices, doc_types=doc_types, model=model, query_params=query_params, headers=headers)
[ "def", "search", "(", "self", ",", "query", ",", "indices", "=", "None", ",", "doc_types", "=", "None", ",", "model", "=", "None", ",", "scan", "=", "False", ",", "headers", "=", "None", ",", "*", "*", "query_params", ")", ":", "if", "isinstance", "(", "query", ",", "Search", ")", ":", "search", "=", "query", "elif", "isinstance", "(", "query", ",", "(", "Query", ",", "dict", ")", ")", ":", "search", "=", "Search", "(", "query", ")", "else", ":", "raise", "InvalidQuery", "(", "\"search() must be supplied with a Search or Query object, or a dict\"", ")", "if", "scan", ":", "query_params", ".", "setdefault", "(", "\"search_type\"", ",", "\"scan\"", ")", "query_params", ".", "setdefault", "(", "\"scroll\"", ",", "\"10m\"", ")", "return", "ResultSet", "(", "self", ",", "search", ",", "indices", "=", "indices", ",", "doc_types", "=", "doc_types", ",", "model", "=", "model", ",", "query_params", "=", "query_params", ",", "headers", "=", "headers", ")" ]
44.6
25.9
def conf(self, conf):
        """ Set the current WechatConf instance """
        self.__conf = conf
        self.__request = WechatRequest(conf=self.__conf)
[ "def", "conf", "(", "self", ",", "conf", ")", ":", "self", ".", "__conf", "=", "conf", "self", ".", "__request", "=", "WechatRequest", "(", "conf", "=", "self", ".", "__conf", ")" ]
34.5
12.25
def delete_contacts( self, ids: List[int] ): """Use this method to delete contacts from your Telegram address book. Args: ids (List of ``int``): A list of unique identifiers for the target users. Can be an ID (int), a username (string) or phone number (string). Returns: True on success. Raises: :class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error. """ contacts = [] for i in ids: try: input_user = self.resolve_peer(i) except PeerIdInvalid: continue else: if isinstance(input_user, types.InputPeerUser): contacts.append(input_user) return self.send( functions.contacts.DeleteContacts( id=contacts ) )
[ "def", "delete_contacts", "(", "self", ",", "ids", ":", "List", "[", "int", "]", ")", ":", "contacts", "=", "[", "]", "for", "i", "in", "ids", ":", "try", ":", "input_user", "=", "self", ".", "resolve_peer", "(", "i", ")", "except", "PeerIdInvalid", ":", "continue", "else", ":", "if", "isinstance", "(", "input_user", ",", "types", ".", "InputPeerUser", ")", ":", "contacts", ".", "append", "(", "input_user", ")", "return", "self", ".", "send", "(", "functions", ".", "contacts", ".", "DeleteContacts", "(", "id", "=", "contacts", ")", ")" ]
27.151515
22.454545
def main(): """The command line interface for the ``pip-accel`` program.""" arguments = sys.argv[1:] # If no arguments are given, the help text of pip-accel is printed. if not arguments: usage() sys.exit(0) # If no install subcommand is given we pass the command line straight # to pip without any changes and exit immediately afterwards. if 'install' not in arguments: # This will not return. os.execvp('pip', ['pip'] + arguments) else: arguments = [arg for arg in arguments if arg != 'install'] config = Config() # Initialize logging output. coloredlogs.install( fmt=config.log_format, level=config.log_verbosity, ) # Adjust verbosity based on -v, -q, --verbose, --quiet options. for argument in list(arguments): if match_option(argument, '-v', '--verbose'): coloredlogs.increase_verbosity() elif match_option(argument, '-q', '--quiet'): coloredlogs.decrease_verbosity() # Perform the requested action(s). try: accelerator = PipAccelerator(config) accelerator.install_from_arguments(arguments) except NothingToDoError as e: # Don't print a traceback for this (it's not very user friendly) and # exit with status zero to stay compatible with pip. For more details # please refer to https://github.com/paylogic/pip-accel/issues/47. logger.warning("%s", e) sys.exit(0) except Exception: logger.exception("Caught unhandled exception!") sys.exit(1)
[ "def", "main", "(", ")", ":", "arguments", "=", "sys", ".", "argv", "[", "1", ":", "]", "# If no arguments are given, the help text of pip-accel is printed.", "if", "not", "arguments", ":", "usage", "(", ")", "sys", ".", "exit", "(", "0", ")", "# If no install subcommand is given we pass the command line straight", "# to pip without any changes and exit immediately afterwards.", "if", "'install'", "not", "in", "arguments", ":", "# This will not return.", "os", ".", "execvp", "(", "'pip'", ",", "[", "'pip'", "]", "+", "arguments", ")", "else", ":", "arguments", "=", "[", "arg", "for", "arg", "in", "arguments", "if", "arg", "!=", "'install'", "]", "config", "=", "Config", "(", ")", "# Initialize logging output.", "coloredlogs", ".", "install", "(", "fmt", "=", "config", ".", "log_format", ",", "level", "=", "config", ".", "log_verbosity", ",", ")", "# Adjust verbosity based on -v, -q, --verbose, --quiet options.", "for", "argument", "in", "list", "(", "arguments", ")", ":", "if", "match_option", "(", "argument", ",", "'-v'", ",", "'--verbose'", ")", ":", "coloredlogs", ".", "increase_verbosity", "(", ")", "elif", "match_option", "(", "argument", ",", "'-q'", ",", "'--quiet'", ")", ":", "coloredlogs", ".", "decrease_verbosity", "(", ")", "# Perform the requested action(s).", "try", ":", "accelerator", "=", "PipAccelerator", "(", "config", ")", "accelerator", ".", "install_from_arguments", "(", "arguments", ")", "except", "NothingToDoError", "as", "e", ":", "# Don't print a traceback for this (it's not very user friendly) and", "# exit with status zero to stay compatible with pip. For more details", "# please refer to https://github.com/paylogic/pip-accel/issues/47.", "logger", ".", "warning", "(", "\"%s\"", ",", "e", ")", "sys", ".", "exit", "(", "0", ")", "except", "Exception", ":", "logger", ".", "exception", "(", "\"Caught unhandled exception!\"", ")", "sys", ".", "exit", "(", "1", ")" ]
39.666667
17.435897
def get_event_exchange(service_name): """ Get an exchange for ``service_name`` events. """ exchange_name = "{}.events".format(service_name) exchange = Exchange( exchange_name, type='topic', durable=True, delivery_mode=PERSISTENT ) return exchange
[ "def", "get_event_exchange", "(", "service_name", ")", ":", "exchange_name", "=", "\"{}.events\"", ".", "format", "(", "service_name", ")", "exchange", "=", "Exchange", "(", "exchange_name", ",", "type", "=", "'topic'", ",", "durable", "=", "True", ",", "delivery_mode", "=", "PERSISTENT", ")", "return", "exchange" ]
30.111111
18
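A usage sketch for the helper above, assuming kombu and a reachable AMQP broker; the service name, broker URL, routing key and payload are made up.
from kombu import Connection

exchange = get_event_exchange('payments')   # helper above -> Exchange('payments.events', type='topic')

with Connection('amqp://guest:guest@localhost//') as conn:
    producer = conn.Producer()
    producer.publish(
        {'order_id': 42},
        exchange=exchange,
        routing_key='payments.order_created',
        declare=[exchange],   # declare the exchange on first use
    )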
def get_response_form(self, assessment_section_id, item_id): """Gets the response form for submitting an answer. arg: assessment_section_id (osid.id.Id): ``Id`` of the ``AssessmentSection`` arg: item_id (osid.id.Id): ``Id`` of the ``Item`` return: (osid.assessment.AnswerForm) - an answer form raise: IllegalState - ``has_assessment_section_begun()`` is ``false or is_assessment_section_over()`` is ``true`` raise: NotFound - ``assessment_section_id`` or ``item_id`` is not found, or ``item_id`` not part of ``assessment_section_id`` raise: NullArgument - ``assessment_section_id`` or ``item_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure occurred *compliance: mandatory -- This method must be implemented.* """ if not isinstance(item_id, ABCId): raise errors.InvalidArgument('argument is not a valid OSID Id') # This is a little hack to get the answer record types from the Item's # first Answer record types. Should really get it from item genus types somehow: record_type_data_sets = get_registry('ANSWER_RECORD_TYPES', self._runtime) section = self.get_assessment_section(assessment_section_id) # because we're now giving session-unique question IDs question = section.get_question(item_id) ils = section._get_item_lookup_session() real_item_id = Id(question._my_map['itemId']) item = ils.get_item(real_item_id) item_map = item.object_map all_answers = item_map['answers'] try: all_answers += [wa.object_map for wa in item.get_wrong_answers()] except AttributeError: pass answer_record_types = [] if len(all_answers) > 0: for record_type_idstr in all_answers[0]['recordTypeIds']: identifier = Id(record_type_idstr).get_identifier() if identifier in record_type_data_sets: answer_record_types.append(Type(**record_type_data_sets[identifier])) else: for record_type_idstr in item_map['question']['recordTypeIds']: identifier = Id(record_type_idstr).get_identifier() if identifier in record_type_data_sets: answer_record_types.append(Type(**record_type_data_sets[identifier])) # Thus endith the hack. obj_form = objects.AnswerForm( bank_id=self._catalog_id, record_types=answer_record_types, item_id=item_id, catalog_id=self._catalog_id, assessment_section_id=assessment_section_id, runtime=self._runtime, proxy=self._proxy) obj_form._for_update = False # This may be redundant self._forms[obj_form.get_id().get_identifier()] = not SUBMITTED return obj_form
[ "def", "get_response_form", "(", "self", ",", "assessment_section_id", ",", "item_id", ")", ":", "if", "not", "isinstance", "(", "item_id", ",", "ABCId", ")", ":", "raise", "errors", ".", "InvalidArgument", "(", "'argument is not a valid OSID Id'", ")", "# This is a little hack to get the answer record types from the Item's", "# first Answer record types. Should really get it from item genus types somehow:", "record_type_data_sets", "=", "get_registry", "(", "'ANSWER_RECORD_TYPES'", ",", "self", ".", "_runtime", ")", "section", "=", "self", ".", "get_assessment_section", "(", "assessment_section_id", ")", "# because we're now giving session-unique question IDs", "question", "=", "section", ".", "get_question", "(", "item_id", ")", "ils", "=", "section", ".", "_get_item_lookup_session", "(", ")", "real_item_id", "=", "Id", "(", "question", ".", "_my_map", "[", "'itemId'", "]", ")", "item", "=", "ils", ".", "get_item", "(", "real_item_id", ")", "item_map", "=", "item", ".", "object_map", "all_answers", "=", "item_map", "[", "'answers'", "]", "try", ":", "all_answers", "+=", "[", "wa", ".", "object_map", "for", "wa", "in", "item", ".", "get_wrong_answers", "(", ")", "]", "except", "AttributeError", ":", "pass", "answer_record_types", "=", "[", "]", "if", "len", "(", "all_answers", ")", ">", "0", ":", "for", "record_type_idstr", "in", "all_answers", "[", "0", "]", "[", "'recordTypeIds'", "]", ":", "identifier", "=", "Id", "(", "record_type_idstr", ")", ".", "get_identifier", "(", ")", "if", "identifier", "in", "record_type_data_sets", ":", "answer_record_types", ".", "append", "(", "Type", "(", "*", "*", "record_type_data_sets", "[", "identifier", "]", ")", ")", "else", ":", "for", "record_type_idstr", "in", "item_map", "[", "'question'", "]", "[", "'recordTypeIds'", "]", ":", "identifier", "=", "Id", "(", "record_type_idstr", ")", ".", "get_identifier", "(", ")", "if", "identifier", "in", "record_type_data_sets", ":", "answer_record_types", ".", "append", "(", "Type", "(", "*", "*", "record_type_data_sets", "[", "identifier", "]", ")", ")", "# Thus endith the hack.", "obj_form", "=", "objects", ".", "AnswerForm", "(", "bank_id", "=", "self", ".", "_catalog_id", ",", "record_types", "=", "answer_record_types", ",", "item_id", "=", "item_id", ",", "catalog_id", "=", "self", ".", "_catalog_id", ",", "assessment_section_id", "=", "assessment_section_id", ",", "runtime", "=", "self", ".", "_runtime", ",", "proxy", "=", "self", ".", "_proxy", ")", "obj_form", ".", "_for_update", "=", "False", "# This may be redundant", "self", ".", "_forms", "[", "obj_form", ".", "get_id", "(", ")", ".", "get_identifier", "(", ")", "]", "=", "not", "SUBMITTED", "return", "obj_form" ]
48
20.612903
def update(self, campaign_id, schedule, nick=None):
        '''xxxxx.xxxxx.campaign.schedule.update
        ===================================
        Update the dayparting (time-of-day) discount settings of a promotion campaign'''
        request = TOPRequest('xxxxx.xxxxx.campaign.schedule.update')
        request['campaign_id'] = campaign_id
        request['schedule'] = schedule
        if nick!=None: request['nick'] = nick
        self.create(self.execute(request), fields=['success','result','success','result_code','result_message'], models={'result':CampaignSchedule})
        return self.result
[ "def", "update", "(", "self", ",", "campaign_id", ",", "schedule", ",", "nick", "=", "None", ")", ":", "request", "=", "TOPRequest", "(", "'xxxxx.xxxxx.campaign.schedule.update'", ")", "request", "[", "'campaign_id'", "]", "=", "campaign_id", "request", "[", "'schedule'", "]", "=", "schedule", "if", "nick", "!=", "None", ":", "request", "[", "'nick'", "]", "=", "nick", "self", ".", "create", "(", "self", ".", "execute", "(", "request", ")", ",", "fields", "=", "[", "'success'", ",", "'result'", ",", "'success'", ",", "'result_code'", ",", "'result_message'", "]", ",", "models", "=", "{", "'result'", ":", "CampaignSchedule", "}", ")", "return", "self", ".", "result" ]
53.6
19.6
def _fill_empty_sessions(self, fill_subjects, fill_visits): """ Fill in tree with additional empty subjects and/or visits to allow the study to pull its inputs from external repositories """ if fill_subjects is None: fill_subjects = [s.id for s in self.subjects] if fill_visits is None: fill_visits = [v.id for v in self.complete_visits] for subject_id in fill_subjects: try: subject = self.subject(subject_id) except ArcanaNameError: subject = self._subjects[subject_id] = Subject( subject_id, [], [], []) for visit_id in fill_visits: try: subject.session(visit_id) except ArcanaNameError: session = Session(subject_id, visit_id, [], []) subject._sessions[visit_id] = session try: visit = self.visit(visit_id) except ArcanaNameError: visit = self._visits[visit_id] = Visit( visit_id, [], [], []) visit._sessions[subject_id] = session
[ "def", "_fill_empty_sessions", "(", "self", ",", "fill_subjects", ",", "fill_visits", ")", ":", "if", "fill_subjects", "is", "None", ":", "fill_subjects", "=", "[", "s", ".", "id", "for", "s", "in", "self", ".", "subjects", "]", "if", "fill_visits", "is", "None", ":", "fill_visits", "=", "[", "v", ".", "id", "for", "v", "in", "self", ".", "complete_visits", "]", "for", "subject_id", "in", "fill_subjects", ":", "try", ":", "subject", "=", "self", ".", "subject", "(", "subject_id", ")", "except", "ArcanaNameError", ":", "subject", "=", "self", ".", "_subjects", "[", "subject_id", "]", "=", "Subject", "(", "subject_id", ",", "[", "]", ",", "[", "]", ",", "[", "]", ")", "for", "visit_id", "in", "fill_visits", ":", "try", ":", "subject", ".", "session", "(", "visit_id", ")", "except", "ArcanaNameError", ":", "session", "=", "Session", "(", "subject_id", ",", "visit_id", ",", "[", "]", ",", "[", "]", ")", "subject", ".", "_sessions", "[", "visit_id", "]", "=", "session", "try", ":", "visit", "=", "self", ".", "visit", "(", "visit_id", ")", "except", "ArcanaNameError", ":", "visit", "=", "self", ".", "_visits", "[", "visit_id", "]", "=", "Visit", "(", "visit_id", ",", "[", "]", ",", "[", "]", ",", "[", "]", ")", "visit", ".", "_sessions", "[", "subject_id", "]", "=", "session" ]
44.592593
12.814815
def get_args(cls, dist, header=None): """ Yield write_script() argument tuples for a distribution's console_scripts and gui_scripts entry points. """ if header is None: header = cls.get_header() spec = str(dist.as_requirement()) for type_ in 'console', 'gui': group = type_ + '_scripts' for name, ep in dist.get_entry_map(group).items(): cls._ensure_safe_name(name) script_text = cls.template % locals() args = cls._get_script_args(type_, name, header, script_text) for res in args: yield res
[ "def", "get_args", "(", "cls", ",", "dist", ",", "header", "=", "None", ")", ":", "if", "header", "is", "None", ":", "header", "=", "cls", ".", "get_header", "(", ")", "spec", "=", "str", "(", "dist", ".", "as_requirement", "(", ")", ")", "for", "type_", "in", "'console'", ",", "'gui'", ":", "group", "=", "type_", "+", "'_scripts'", "for", "name", ",", "ep", "in", "dist", ".", "get_entry_map", "(", "group", ")", ".", "items", "(", ")", ":", "cls", ".", "_ensure_safe_name", "(", "name", ")", "script_text", "=", "cls", ".", "template", "%", "locals", "(", ")", "args", "=", "cls", ".", "_get_script_args", "(", "type_", ",", "name", ",", "header", ",", "script_text", ")", "for", "res", "in", "args", ":", "yield", "res" ]
40.8125
9.8125
def load_json(data=None, path=None, name='NT'):
    """ Map JSON data onto namedtuples.

    :param data: a JSON string to parse with ``json.loads``.
    :param path: an open file object containing JSON, passed to ``json.load``.
    :param name: name of the generated namedtuple class.
    """
    if data and not path:
        return mapper(json.loads(data), _nt_name=name)

    if path and not data:
        return mapper(json.load(path), _nt_name=name)

    if data and path:
        raise ValueError('expected one source and received two')
[ "def", "load_json", "(", "data", "=", "None", ",", "path", "=", "None", ",", "name", "=", "'NT'", ")", ":", "if", "data", "and", "not", "path", ":", "return", "mapper", "(", "json", ".", "loads", "(", "data", ")", ",", "_nt_name", "=", "name", ")", "if", "path", "and", "not", "data", ":", "return", "mapper", "(", "json", ".", "load", "(", "path", ")", ",", "_nt_name", "=", "name", ")", "if", "data", "and", "path", ":", "raise", "ValueError", "(", "'expected one source and received two'", ")" ]
41.5
13.375
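mapper is defined elsewhere in the package; a minimal stand-in consistent with the call sites above could look like this (hypothetical, and it assumes JSON keys are valid Python identifiers):
import json
from collections import namedtuple

def mapper(obj, _nt_name='NT'):
    # Recursively turn dicts into namedtuples named after their parent key.
    if isinstance(obj, dict):
        converted = {k: mapper(v, _nt_name=k) for k, v in obj.items()}
        return namedtuple(_nt_name, converted.keys())(**converted)
    if isinstance(obj, list):
        return [mapper(item, _nt_name=_nt_name) for item in obj]
    return obj

nt = mapper(json.loads('{"user": {"name": "ada"}}'))
print(nt.user.name)   # ada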
def delete_access_key(self, access_key_id, user_name=None): """ Delete an access key associated with a user. If the user_name is not specified, it is determined implicitly based on the AWS Access Key ID used to sign the request. :type access_key_id: string :param access_key_id: The ID of the access key to be deleted. :type user_name: string :param user_name: The username of the user """ params = {'AccessKeyId' : access_key_id} if user_name: params['UserName'] = user_name return self.get_response('DeleteAccessKey', params)
[ "def", "delete_access_key", "(", "self", ",", "access_key_id", ",", "user_name", "=", "None", ")", ":", "params", "=", "{", "'AccessKeyId'", ":", "access_key_id", "}", "if", "user_name", ":", "params", "[", "'UserName'", "]", "=", "user_name", "return", "self", ".", "get_response", "(", "'DeleteAccessKey'", ",", "params", ")" ]
34.555556
19.222222
def _zip_request_params(self, urls, query_params, data):
        """Massages inputs and returns a list of 3-tuples zipping them up.

        This is all the smarts behind deciding how many requests to issue.
        It's fine for an input to have 0, 1, or a list of values.
        If there are two inputs each with a list of values, the cardinality
        of those lists must match.

        Args:
            urls - 1 string URL or a list of URLs
            query_params - None, 1 dict, or a list of dicts
            data - None, 1 dict or string, or a list of dicts or strings

        Returns:
            A list of 3-tuples (url, query_param, data)

        Raises:
            InvalidRequestError - if cardinality of lists does not match
        """
        # Everybody gets to be a list
        if not isinstance(urls, list):
            urls = [urls]
        if not isinstance(query_params, list):
            query_params = [query_params]
        if not isinstance(data, list):
            data = [data]

        # Counts must not mismatch
        url_count = len(urls)
        query_param_count = len(query_params)
        data_count = len(data)

        max_count = max(url_count, query_param_count, data_count)

        if (
            max_count > url_count > 1 or
            max_count > query_param_count > 1 or
            max_count > data_count > 1
        ):
            raise InvalidRequestError(
                'Mismatched parameter count url_count:{0} query_param_count:{1} data_count:{2} max_count:{3}',
                url_count,
                query_param_count,
                data_count,
                max_count,
            )

        # Pad out lists
        if url_count < max_count:
            urls = urls * max_count
        if query_param_count < max_count:
            query_params = query_params * max_count
        if data_count < max_count:
            data = data * max_count

        return list(zip(urls, query_params, data))
[ "def", "_zip_request_params", "(", "self", ",", "urls", ",", "query_params", ",", "data", ")", ":", "# Everybody gets to be a list", "if", "not", "isinstance", "(", "urls", ",", "list", ")", ":", "urls", "=", "[", "urls", "]", "if", "not", "isinstance", "(", "query_params", ",", "list", ")", ":", "query_params", "=", "[", "query_params", "]", "if", "not", "isinstance", "(", "data", ",", "list", ")", ":", "data", "=", "[", "data", "]", "# Counts must not mismatch", "url_count", "=", "len", "(", "urls", ")", "query_param_count", "=", "len", "(", "query_params", ")", "data_count", "=", "len", "(", "data", ")", "max_count", "=", "max", "(", "url_count", ",", "query_param_count", ",", "data_count", ")", "if", "(", "max_count", ">", "url_count", ">", "1", "or", "max_count", ">", "query_param_count", ">", "1", "or", "max_count", ">", "data_count", ">", "1", ")", ":", "raise", "InvalidRequestError", "(", "'Mismatched parameter count url_count:{0} query_param_count:{1} data_count:{2} max_count:{3}'", ",", "url_count", ",", "query_param_count", ",", "data_count", ",", "max_count", ",", ")", "# Pad out lists", "if", "url_count", "<", "max_count", ":", "urls", "=", "urls", "*", "max_count", "if", "query_param_count", "<", "max_count", ":", "query_params", "=", "query_params", "*", "max_count", "if", "data_count", "<", "max_count", ":", "data", "=", "data", "*", "max_count", "return", "list", "(", "zip", "(", "urls", ",", "query_params", ",", "data", ")", ")" ]
36.568627
19.607843
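The broadcasting rule above in miniature: length-1 inputs are repeated to the longest input's length, then everything is zipped positionally.
urls = ['https://a.example', 'https://b.example']
query_params = [{'q': 1}]   # length 1 -> padded to 2
data = [None]               # length 1 -> padded to 2

max_count = max(len(urls), len(query_params), len(data))
query_params *= max_count
data *= max_count
print(list(zip(urls, query_params, data)))
# [('https://a.example', {'q': 1}, None), ('https://b.example', {'q': 1}, None)]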
def set_alarm_state(self, state):
        """
        :param state: a boolean of True (on) or False (off)
        :return: nothing
        """
        values = {"desired_state": {"alarm_enabled": state}}
        response = self.api_interface.set_device_state(self, values)
        self._update_state_from_response(response)
[ "def", "set_alarm_state", "(", "self", ",", "state", ")", ":", "values", "=", "{", "\"desired_state\"", ":", "{", "\"alarm_enabled\"", ":", "state", "}", "}", "response", "=", "self", ".", "api_interface", ".", "set_device_state", "(", "self", ",", "values", ")", "self", ".", "_update_state_from_response", "(", "response", ")" ]
40.625
12.875
def cohort_queryplan(plan): """ Input: { 'source': 'kronos', # Name of data source from settings 'cohort': {'stream': CohortTest.EMAIL_STREAM, # Kronos stream to define cohort from. 'transform': lambda x: x, # Transformations on the kstream. 'start': date.now(), # The day of the first cohort. 'unit': DateUnit.XX, # Users are in the same cohort # if they are in the same day/week. 'cohorts': 5 # How many cohorts (days/weeks/months) # to track. 'grouping_key': 'user'}, # What key in an event should we tie # to a key in the action stream? 'action': {'stream': CohortTest.FRONTPAGE_STREAM, # Stream users take actions on. 'transform': lambda x: x # Transformations on the stream. 'unit': DateUnit.XX, # Track events in day/week/months. 'repetitions': 14 # How many days/weeks/months to track. 'grouping_key': 'user_id'} # What key in an event should we tie # to a key in the action stream? } Output: A metis-compatible query plan to return a cohort analysis. """ cohort = plan['cohort'] action = plan['action'] source = plan['source'] # Calculate the start and end dates, in Kronos time, of the # beginning and end of the cohort and action streams that will be # relevant. cohort_start = datetime_to_kronos_time(_date_to_datetime(cohort['start'])) cohort_span = timedelta(**{cohort['unit']: cohort['cohorts']}) cohort_end = cohort['start'] + cohort_span action_span = timedelta(**{action['unit']: action['repetitions']}) action_end = cohort_end + action_span cohort_end = datetime_to_kronos_time(_date_to_datetime(cohort_end)) + 1 action_end = datetime_to_kronos_time(_date_to_datetime(action_end)) + 1 left = _cohort_stream_transform(source, cohort['stream'], cohort_start, cohort_end, cohort.get('transform'), cohort['grouping_key'], cohort['unit']) right = _cohort_stream_transform(source, action['stream'], cohort_start, action_end, action.get('transform'), action['grouping_key'], action['unit']) additional_action_time = (DateUnit.unit_to_kronos_time(action['unit']) * action['repetitions']) left.alias = 'cohort' right.alias = 'action' joined = Join(left, right, (Condition(Condition.Op.EQ, Property('cohort.%s' % cohort['grouping_key']), Property('action.%s' % action['grouping_key'])) & Condition(Condition.Op.GTE, Property('action.%s' % TIMESTAMP_FIELD), Property('cohort.%s' % TIMESTAMP_FIELD)) & Condition(Condition.Op.LT, Property('action.%s' % TIMESTAMP_FIELD), Add([Property('cohort.%s' % TIMESTAMP_FIELD), Constant(additional_action_time)])))) user_aggregated = Aggregate( joined, GroupBy([Property('cohort.date', alias=TIMESTAMP_FIELD), Property('cohort.%s' % cohort['grouping_key'], alias='group'), Floor([Subtract([Property('action.%s' % TIMESTAMP_FIELD), Property('cohort.%s' % TIMESTAMP_FIELD)]), Constant(DateUnit.unit_to_kronos_time(action['unit']))], alias='action_step')]), [Count([], alias='count')] ) aggregated = Aggregate( user_aggregated, GroupBy([Property(TIMESTAMP_FIELD, alias=TIMESTAMP_FIELD), Property('action_step', alias='action_step')]), [Count([], alias='cohort_actions')]) # TODO(marcua): Also sum up the cohort sizes, join with the plan. return aggregated.to_dict()
[ "def", "cohort_queryplan", "(", "plan", ")", ":", "cohort", "=", "plan", "[", "'cohort'", "]", "action", "=", "plan", "[", "'action'", "]", "source", "=", "plan", "[", "'source'", "]", "# Calculate the start and end dates, in Kronos time, of the", "# beginning and end of the cohort and action streams that will be", "# relevant.", "cohort_start", "=", "datetime_to_kronos_time", "(", "_date_to_datetime", "(", "cohort", "[", "'start'", "]", ")", ")", "cohort_span", "=", "timedelta", "(", "*", "*", "{", "cohort", "[", "'unit'", "]", ":", "cohort", "[", "'cohorts'", "]", "}", ")", "cohort_end", "=", "cohort", "[", "'start'", "]", "+", "cohort_span", "action_span", "=", "timedelta", "(", "*", "*", "{", "action", "[", "'unit'", "]", ":", "action", "[", "'repetitions'", "]", "}", ")", "action_end", "=", "cohort_end", "+", "action_span", "cohort_end", "=", "datetime_to_kronos_time", "(", "_date_to_datetime", "(", "cohort_end", ")", ")", "+", "1", "action_end", "=", "datetime_to_kronos_time", "(", "_date_to_datetime", "(", "action_end", ")", ")", "+", "1", "left", "=", "_cohort_stream_transform", "(", "source", ",", "cohort", "[", "'stream'", "]", ",", "cohort_start", ",", "cohort_end", ",", "cohort", ".", "get", "(", "'transform'", ")", ",", "cohort", "[", "'grouping_key'", "]", ",", "cohort", "[", "'unit'", "]", ")", "right", "=", "_cohort_stream_transform", "(", "source", ",", "action", "[", "'stream'", "]", ",", "cohort_start", ",", "action_end", ",", "action", ".", "get", "(", "'transform'", ")", ",", "action", "[", "'grouping_key'", "]", ",", "action", "[", "'unit'", "]", ")", "additional_action_time", "=", "(", "DateUnit", ".", "unit_to_kronos_time", "(", "action", "[", "'unit'", "]", ")", "*", "action", "[", "'repetitions'", "]", ")", "left", ".", "alias", "=", "'cohort'", "right", ".", "alias", "=", "'action'", "joined", "=", "Join", "(", "left", ",", "right", ",", "(", "Condition", "(", "Condition", ".", "Op", ".", "EQ", ",", "Property", "(", "'cohort.%s'", "%", "cohort", "[", "'grouping_key'", "]", ")", ",", "Property", "(", "'action.%s'", "%", "action", "[", "'grouping_key'", "]", ")", ")", "&", "Condition", "(", "Condition", ".", "Op", ".", "GTE", ",", "Property", "(", "'action.%s'", "%", "TIMESTAMP_FIELD", ")", ",", "Property", "(", "'cohort.%s'", "%", "TIMESTAMP_FIELD", ")", ")", "&", "Condition", "(", "Condition", ".", "Op", ".", "LT", ",", "Property", "(", "'action.%s'", "%", "TIMESTAMP_FIELD", ")", ",", "Add", "(", "[", "Property", "(", "'cohort.%s'", "%", "TIMESTAMP_FIELD", ")", ",", "Constant", "(", "additional_action_time", ")", "]", ")", ")", ")", ")", "user_aggregated", "=", "Aggregate", "(", "joined", ",", "GroupBy", "(", "[", "Property", "(", "'cohort.date'", ",", "alias", "=", "TIMESTAMP_FIELD", ")", ",", "Property", "(", "'cohort.%s'", "%", "cohort", "[", "'grouping_key'", "]", ",", "alias", "=", "'group'", ")", ",", "Floor", "(", "[", "Subtract", "(", "[", "Property", "(", "'action.%s'", "%", "TIMESTAMP_FIELD", ")", ",", "Property", "(", "'cohort.%s'", "%", "TIMESTAMP_FIELD", ")", "]", ")", ",", "Constant", "(", "DateUnit", ".", "unit_to_kronos_time", "(", "action", "[", "'unit'", "]", ")", ")", "]", ",", "alias", "=", "'action_step'", ")", "]", ")", ",", "[", "Count", "(", "[", "]", ",", "alias", "=", "'count'", ")", "]", ")", "aggregated", "=", "Aggregate", "(", "user_aggregated", ",", "GroupBy", "(", "[", "Property", "(", "TIMESTAMP_FIELD", ",", "alias", "=", "TIMESTAMP_FIELD", ")", ",", "Property", "(", "'action_step'", ",", "alias", "=", "'action_step'", ")", "]", 
")", ",", "[", "Count", "(", "[", "]", ",", "alias", "=", "'cohort_actions'", ")", "]", ")", "# TODO(marcua): Also sum up the cohort sizes, join with the plan.", "return", "aggregated", ".", "to_dict", "(", ")" ]
45.266667
25.622222
def exposure_plot(self, places=-1, c_poly='default', c_holes='default',
                   s_sop=25, extra_height=0.1):
        """
        Plots the exposure of the sensible points in a space to the data
        and the Sun positions. The shadowing must be computed beforehand.

        If the computation has been made with a data timeseries, the plot
        will have a colorbar. Units are accumulated kilounits*hour (for the
        series), that is, if the input data is in Watts (irradiation) for a
        whole year, the output will be kWh received in an entire year.

        If no data is input, the plot will show only the number of times
        each point "has been seen by the Sun" along the series.

        :param places: Indexes of the places to plot. If -1, plots all.
        :type places: int or list
        :param c_poly: Polygons color.
        :type c_poly: matplotlib color, 'default' or 't' (transparent)
        :param c_holes: Holes color.
        :type c_holes: matplotlib color, 'default' or 't' (transparent)
        :param s_sop: Set of points size.
        :type s_sop: float or ndarray
        :param extra_height: Extra elevation for the points in the plot.
        :type extra_height: float
        :returns: None
        """
        import matplotlib.pyplot as plt
        import matplotlib.cm as cm
        import matplotlib.colors as mcolors

        sm = self.SM
        if sm.light_vor is None:
            raise ValueError('The shadowing has not been computed yet')
        proj_data = sm.proj_points*100/sm.proj_points.max()

        if places == -1:
            places = range(len(sm.space.places))
        elif type(places) == int:
            places = [places]
        places = np.array(places)
        places[places<0] = len(sm.space.places) + places[places<0]
        places = np.unique(places)

        points = sm.space.get_sets_of_points()
        index = sm.space.get_sets_index()

        # Model plot
        sop = []
        data = []
        aux_space = pyny.Space()  # Later centering of the plot
        ax = None
        for i in places:
            aux_space.add_places(sm.space[i])
            ax = sm.space[i].iplot(c_poly=c_poly, c_holes=c_holes,
                                   c_sop=False, ret=True, ax=ax)
            sop.append(points[index==i])
            data.append(proj_data[index==i])
        sop = np.vstack(sop)
        sop = np.vstack((sop, np.array([-1e+12, -1e+12, -1e+12])))
        data = np.hstack(data)
        proj_data = np.hstack((data, 0))

        # Sensible points plot
        ## Color
        cmap = cm.jet
        normalize = mcolors.Normalize(vmin=proj_data.min(),
                                      vmax=proj_data.max())
        color_vector = cmap(normalize(proj_data))
        ## Plot
        ax.scatter(sop[:, 0], sop[:, 1], sop[:, 2]+extra_height,
                   c=color_vector, s=s_sop)
        ## Axis
        aux_space.center_plot(ax)
        ## Colorbar
        scalarmappaple = cm.ScalarMappable(norm=normalize, cmap=cmap)
        scalarmappaple.set_array(proj_data)
        cbar = plt.colorbar(scalarmappaple, shrink=0.8, aspect=10)
        cbar.ax.set_ylabel('%', rotation=0)

        if not (sm.arg_data.max() == 1 and sm.arg_data.min() == 1):
            plt.title('Accumulated data Projection\nmax = ' +
                      str(sm.proj_points.max()) + ' kilounits*hour')
        else:
            plt.title('Sun exposure')
[ "def", "exposure_plot", "(", "self", ",", "places", "=", "-", "1", ",", "c_poly", "=", "'default'", ",", "c_holes", "=", "'default'", ",", "s_sop", "=", "25", ",", "extra_height", "=", "0.1", ")", ":", "import", "matplotlib", ".", "pyplot", "as", "plt", "import", "matplotlib", ".", "cm", "as", "cm", "import", "matplotlib", ".", "colors", "as", "mcolors", "sm", "=", "self", ".", "SM", "if", "sm", ".", "light_vor", "is", "None", ":", "raise", "ValueError", "(", "'The shadowing has not been computed yet'", ")", "proj_data", "=", "sm", ".", "proj_points", "*", "100", "/", "sm", ".", "proj_points", ".", "max", "(", ")", "if", "places", "==", "-", "1", ":", "places", "=", "range", "(", "len", "(", "sm", ".", "space", ".", "places", ")", ")", "elif", "type", "(", "places", ")", "==", "int", ":", "places", "=", "[", "places", "]", "places", "=", "np", ".", "array", "(", "places", ")", "places", "[", "places", "<", "0", "]", "=", "len", "(", "sm", ".", "space", ".", "places", ")", "+", "places", "[", "places", "<", "0", "]", "places", "=", "np", ".", "unique", "(", "places", ")", "points", "=", "sm", ".", "space", ".", "get_sets_of_points", "(", ")", "index", "=", "sm", ".", "space", ".", "get_sets_index", "(", ")", "# Model plot\r", "sop", "=", "[", "]", "data", "=", "[", "]", "aux_space", "=", "pyny", ".", "Space", "(", ")", "# Later centering of the plot\r", "ax", "=", "None", "for", "i", "in", "places", ":", "aux_space", ".", "add_places", "(", "sm", ".", "space", "[", "i", "]", ")", "ax", "=", "sm", ".", "space", "[", "i", "]", ".", "iplot", "(", "c_poly", "=", "c_poly", ",", "c_holes", "=", "c_holes", ",", "c_sop", "=", "False", ",", "ret", "=", "True", ",", "ax", "=", "ax", ")", "sop", ".", "append", "(", "points", "[", "index", "==", "i", "]", ")", "data", ".", "append", "(", "proj_data", "[", "index", "==", "i", "]", ")", "sop", "=", "np", ".", "vstack", "(", "sop", ")", "sop", "=", "np", ".", "vstack", "(", "(", "sop", ",", "np", ".", "array", "(", "[", "-", "1e+12", ",", "-", "1e+12", ",", "-", "1e+12", "]", ")", ")", ")", "data", "=", "np", ".", "hstack", "(", "data", ")", "proj_data", "=", "np", ".", "hstack", "(", "(", "data", ",", "0", ")", ")", "# Sensible points plot\r", "## Color\r", "cmap", "=", "cm", ".", "jet", "normalize", "=", "mcolors", ".", "Normalize", "(", "vmin", "=", "proj_data", ".", "min", "(", ")", ",", "vmax", "=", "proj_data", ".", "max", "(", ")", ")", "color_vector", "=", "cmap", "(", "normalize", "(", "proj_data", ")", ")", "## Plot\r", "ax", ".", "scatter", "(", "sop", "[", ":", ",", "0", "]", ",", "sop", "[", ":", ",", "1", "]", ",", "sop", "[", ":", ",", "2", "]", "+", "extra_height", ",", "c", "=", "color_vector", ",", "s", "=", "s_sop", ")", "## Axis\r", "aux_space", ".", "center_plot", "(", "ax", ")", "## Colorbar\r", "scalarmappaple", "=", "cm", ".", "ScalarMappable", "(", "norm", "=", "normalize", ",", "cmap", "=", "cmap", ")", "scalarmappaple", ".", "set_array", "(", "proj_data", ")", "cbar", "=", "plt", ".", "colorbar", "(", "scalarmappaple", ",", "shrink", "=", "0.8", ",", "aspect", "=", "10", ")", "cbar", ".", "ax", ".", "set_ylabel", "(", "'%'", ",", "rotation", "=", "0", ")", "if", "not", "(", "sm", ".", "arg_data", ".", "max", "(", ")", "==", "1", "and", "sm", ".", "arg_data", ".", "min", "(", ")", "==", "1", ")", ":", "plt", ".", "title", "(", "'Accumulated data Projection\\nmax = '", "+", "str", "(", "sm", ".", "proj_points", ".", "max", "(", ")", ")", "+", "' kilounits*hour'", ")", "else", ":", "plt", ".", "title", "(", "'Sun exposure'", ")" ]
41.306818
17.511364
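A short usage sketch for `exposure_plot`, assuming a hypothetical object `viewer` of the class above whose `SM` shadowing model has already been computed (otherwise the method raises ValueError); the place indexes and sizes are illustrative:

import matplotlib.pyplot as plt

# 'viewer' is a hypothetical instance; sm.light_vor must already be
# populated by a prior shadowing computation before this call.
viewer.exposure_plot(places=[0, 2],     # plot only the first and third places
                     s_sop=40,          # larger markers for the sensible points
                     extra_height=0.2)  # lift the points above the surfaces
plt.show()                              # the method draws but does not call show()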
def delete_note(self, note_id,
                    url='https://api.shanbay.com/bdc/note/{note_id}/'):
        """Delete a note"""
        url = url.format(note_id=note_id)
        return self._request(url, method='delete').json()
[ "def", "delete_note", "(", "self", ",", "note_id", ",", "url", "=", "'https://api.shanbay.com/bdc/note/{note_id}/'", ")", ":", "url", "=", "url", ".", "format", "(", "note_id", "=", "note_id", ")", "return", "self", ".", "_request", "(", "url", ",", "method", "=", "'delete'", ")", ".", "json", "(", ")" ]
43.4
11.8
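A hedged usage sketch for `delete_note`, assuming a hypothetical, already-authenticated client instance of the class above; the note id is illustrative:

# 'client' is a hypothetical authenticated instance of the class above.
response = client.delete_note(12345)  # DELETE https://api.shanbay.com/bdc/note/12345/
print(response)                       # parsed JSON body returned by the API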