Unnamed: 0
int64
0
389k
code
stringlengths
26
79.6k
docstring
stringlengths
1
46.9k
700
def predict_variant_effect_on_transcript_or_failure(variant, transcript): try: return predict_variant_effect_on_transcript( variant=variant, transcript=transcript) except (AssertionError, ValueError) as error: logger.warn( "Encountered error annotating %s...
Try predicting the effect of a variant on a particular transcript but suppress raised exceptions by converting them into `Failure` effect values.
701
def generate_safemode_windows(): try: import pyblish import pyblish_qml import PyQt5 except ImportError: return sys.stderr.write( "Run this in a terminal with access to " "the Pyblish libraries and PyQt5.\n") template = r values = {} f...
Produce batch file to run QML in safe-mode Usage: $ python -c "import compat;compat.generate_safemode_windows()" $ run.bat
702
def connect(self, interface=None): if self._simulate: return True if not interface: match = self._autodiscover_usb() self._log.debug("Auto-discovered USB port: %s" % match) else: self.USB_PORT = interface try: self._co...
Connect to the USB for the hottop. Attempt to discover the USB port used for the Hottop and then form a connection using the serial library. :returns: bool :raises SerialConnectionError:
703
def hash_for_signing(self, msg): magic = self.msg_magic_for_netcode() fd = io.BytesIO() stream_satoshi_string(fd, magic.encode()) stream_satoshi_string(fd, msg.encode()) return from_bytes_32(double_sha256(fd.getvalue()))
Return a hash of msg, according to odd bitcoin method: double SHA256 over a bitcoin encoded stream of two strings: a fixed magic prefix and the actual message.
704
def add_args(parser, positional=False): group = parser.add_argument_group("read loading") group.add_argument("reads" if positional else "--reads", nargs="+", default=[], help="Paths to bam files. Any number of paths may be specified.") group.add_argument( "--read-source-name", ...
Extends a commandline argument parser with arguments for specifying read sources.
705
def pkey(self): if self._pkey is None: self._pkey = self._get_pkey() return self._pkey
Returns the private key for quick authentication on the SSH server.
706
def rename(self, target): if self._closed: self._raise_closed() self._accessor.rename(self, target)
Rename this path to the given path.
707
def has_button(self, button): rc = self._libinput.libinput_device_pointer_has_button( self._handle, button) assert rc >= 0, return bool(rc)
Check if this device has a given button. Args: button (int): Button to check for, see ``input.h`` for button definitions. Returns: bool: :obj:`True` if the device has this button, :obj:`False` if it does not. Raises: AssertionError
708
def feedkeys(self, keys, options=, escape_csi=True): return self.request(, keys, options, escape_csi)
Push `keys` to Nvim user input buffer. Options can be a string with the following character flags: - 'm': Remap keys. This is default. - 'n': Do not remap keys. - 't': Handle keys as if typed; otherwise they are handled as if coming from a mapping. This matters for undo, ...
709
def convert_tensor_to_label(scope, operator, container): TensorToLabelab model_type = operator.raw_operator.WhichOneof() if model_type == : model = operator.raw_operator.neuralNetworkClassifier if model.WhichOneof() == : labels = list(s.encode() for s in model.stringClassLabels.v...
This converter tries to convert a dummy operator 'TensorToLabel' into a sequence of some ONNX operators. Those operators are used to extract the label with the highest probability for doing a prediction. We assume that the elements in the given probability tensor are aligned with the class labels specified in t...
710
def set_break( self, filename, lineno=None, temporary=False, cond=None, funcname=None ): log.info( % (filename, lineno, temporary, cond, funcname) ) breakpoint = self.get_break( filename, lineno, temporary, cond, funcname ...
Put a breakpoint for filename
711
def clean_translation(self): translation = self.cleaned_data[] if self.instance and self.instance.content_object: obj = self.instance.content_object field = obj._meta.get_field(self.instance.field) max_length = field.max_length if m...
Do not allow translations longer than the max_lenght of the field to be translated.
712
def update(self): self._attrs = self._session.refresh_attributes(self.name) self._attrs = assert_is_dict(self._attrs) if self.base_station: self.base_station.update()
Update object properties.
713
def changes(self): output = [] if self.status() is self.UNMODIFIED: output = [self.formatter % (, self.key, self.old_value)] elif self.status() is self.ADDED: output.append(self.formatter % (, self.key, self.new_value)) elif self.status() is self.REMOVED:...
Returns a list of changes to represent the diff between old and new value. Returns: list: [string] representation of the change (if any) between old and new value
714
def reload(self): self.rows = [] for r in self.iterload(): self.addRow(r)
Loads rows and/or columns. Override in subclass.
715
def lookup_hist(self, mh): result = mh.similar_blank_histogram() points = np.stack([mh.all_axis_bin_centers(i) for i in range(mh.dimensions)]).reshape(mh.dimensions, -1) values = self.lookup(*points) result.histogram = values.reshape(result.histogram.s...
Return histogram within binning of Histdd mh, with values looked up in this histogram. This is not rebinning: no interpolation /renormalization is performed. It's just a lookup.
716
def username(self): entry = self._proto.commandQueueEntry if entry.HasField(): return entry.username return None
The username of the issuer.
717
def calculate_file_distances(dicom_files, field_weights=None, dist_method_cls=None, **kwargs): if dist_method_cls is None: dist_method = LevenshteinDicomFileDistance(field_weights) else: try: dist_method = dist_method_cls(field_weights=field_weights,...
Calculates the DicomFileDistance between all files in dicom_files, using an weighted Levenshtein measure between all field names in field_weights and their corresponding weights. Parameters ---------- dicom_files: iterable of str Dicom file paths field_weights: dict of str to float ...
718
def pcolormesh(x, y, z, ax, infer_intervals=None, **kwargs): x = np.asarray(x) if infer_intervals is None: if hasattr(ax, ): if len(x.shape) == 1: infer_intervals = True else: infer_intervals = False else: infer_inter...
Pseudocolor plot of 2d DataArray Wraps :func:`matplotlib:matplotlib.pyplot.pcolormesh`
719
def execute_script(self, script, g=None): if not g==None: self.extra_globals.update(g) if not _s.fun.is_iterable(script): if script is None: return None [expression, v] = self._parse_script(script) ...
Runs a script, returning the result. Parameters ---------- script String script to be evaluated (see below). g=None Optional dictionary of additional globals for the script evaluation. These will automatically be inserted into self.extra_globals. ...
720
def from_euler312(self, roll, pitch, yaw): c3 = cos(pitch) s3 = sin(pitch) s2 = sin(roll) c2 = cos(roll) s1 = sin(yaw) c1 = cos(yaw) self.a.x = c1 * c3 - s1 * s2 * s3 self.b.y = c1 * c2 self.c.z = c3 * c2 self.a.y = -c2*s1 ...
fill the matrix from Euler angles in radians in 312 convention
721
def indexes_all(ol,value): aaaa length = ol.__len__() indexes =[] for i in range(0,length): if(value == ol[i]): indexes.append(i) else: pass return(indexes)
from elist.elist import * ol = [1,'a',3,'a',4,'a',5] indexes_all(ol,'a')
722
def put_file(self, in_path, out_path): try: st = os.stat(in_path) except OSError as e: self._throw_io_error(e, in_path) raise if not stat.S_ISREG(st.st_mode): raise IOError( % (in_path,)) if st.st_size ...
Implement put_file() by streamily transferring the file via FileService. :param str in_path: Local filesystem path to read. :param str out_path: Remote filesystem path to write.
723
def append(self, observation, action, reward, terminal, training=True): super(SequentialMemory, self).append(observation, action, reward, terminal, training=training) if training: self.observations.append(observation) self.actions.append(actio...
Append an observation to the memory # Argument observation (dict): Observation returned by environment action (int): Action taken to obtain this observation reward (float): Reward obtained by taking this action terminal (boolean): Is the state terminal
724
def _get_fullname(obj): if not hasattr(obj, "__name__"): obj = obj.__class__ if obj.__module__ in ("builtins", "__builtin__"): return obj.__name__ return "{}.{}".format(obj.__module__, obj.__name__)
Get the full name of an object including the module. Args: obj: An object. Returns: The full class name of the object.
725
def get_param(self, number): logger.debug("retreiving param number %s" % number) type_ = snap7.snap7types.param_types[number] value = type_() code = self.library.Par_GetParam(self.pointer, ctypes.c_int(number), ctypes.byref(value)) ...
Reads an internal Partner object parameter.
726
def save(self): self._ensure_have_load_only() for fname, parser in self._modified_parsers: logger.info("Writing to %s", fname) ensure_dir(os.path.dirname(fname)) with open(fname, "w") as f: parser.write(f)
Save the currentin-memory state.
727
def validate_token(refresh_url, exceptions=(), callback=None, access_key=, refresh_key=): def _validate_token(f): def wrapper(*args): self = args[0] try: return f(*args) except exceptions: try: se...
a decorator used to validate the access_token for oauth based data sources. This decorator should be used on every method in the data source that fetches data from the oauth controlled resource, and that relies on a valid access_token in order to operate properly. If the token is valid, the normal f...
728
def filter_service_by_host_bp_rule_label(label): def inner_filter(items): service = items["service"] host = items["hosts"][service.host] if service is None or host is None: return False return label in host.labels return inner_filter
Filter for service Filter on label :param label: label to filter :type label: str :return: Filter :rtype: bool
729
def add_virtual_columns_aitoff(self, alpha, delta, x, y, radians=True): transform = "" if radians else "*pi/180." aitoff_alpha = "__aitoff_alpha_%s_%s" % (alpha, delta) aitoff_alpha = re.sub("[^a-zA-Z_]", "_", aitoff_alpha) self.add_virtual_column(aitoff_alpha, "arccos...
Add aitoff (https://en.wikipedia.org/wiki/Aitoff_projection) projection :param alpha: azimuth angle :param delta: polar angle :param x: output name for x coordinate :param y: output name for y coordinate :param radians: input and output in radians (True), or degrees (False) ...
730
def override_account_fields(self, settled_cash=not_overridden, accrued_interest=not_overridden, buying_power=not_overridden, equity_with_loan=not_overridden, to...
Override fields on ``self.account``.
731
def get_changes(self, changers, in_hierarchy=False, resources=None, task_handle=taskhandle.NullTaskHandle()): function_changer = _FunctionChangers(self.pyname.get_object(), self._definfo(), changers) return self._change_calls(func...
Get changes caused by this refactoring `changers` is a list of `_ArgumentChanger`\s. If `in_hierarchy` is `True` the changers are applyed to all matching methods in the class hierarchy. `resources` can be a list of `rope.base.resource.File`\s that should be searched for occurre...
732
def ltime(etobs, obs, direct, targ): etobs = ctypes.c_double(etobs) obs = ctypes.c_int(obs) direct = stypes.stringToCharP(direct) targ = ctypes.c_int(targ) ettarg = ctypes.c_double() elapsd = ctypes.c_double() libspice.ltime_c(etobs, obs, direct, targ, ctypes.byref(ettarg), ...
This routine computes the transmit (or receive) time of a signal at a specified target, given the receive (or transmit) time at a specified observer. The elapsed time between transmit and receive is also returned. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ltime_c.html :param etobs: E...
733
def _feed_calendar_span(gtfs, stats): n_feeds = _n_gtfs_sources(gtfs)[0] max_start = None min_end = None if n_feeds > 1: for i in range(n_feeds): feed_key = "feed_" + str(i) + "_" start_key = feed_key + "calendar_start" end_key = feed_key + "calendar_end"...
Computes the temporal coverage of each source feed Parameters ---------- gtfs: gtfspy.GTFS object stats: dict where to append the stats Returns ------- stats: dict
734
def send(self, sender: PytgbotApiBot): return sender.send_voice( voice=self.voice, chat_id=self.receiver, reply_to_message_id=self.reply_id, caption=self.caption, parse_mode=self.parse_mode, duration=self.duration, disable_notification=self.disable_notification, reply_markup=se...
Send the message via pytgbot. :param sender: The bot instance to send with. :type sender: pytgbot.bot.Bot :rtype: PytgbotApiMessage
735
def initializerepo(self): try: os.mkdir(self.repopath) except OSError: pass cmd = self.repo.init(bare=self.bare, shared=self.shared) if not self.bare: self.write_testing_data([], []) self.write_training_data([], []) ...
Fill empty directory with products and make first commit
736
def binomial_coefficient(n, k): if not isinstance(k, int) or not isinstance(n, int): raise TypeError("Expecting positive integers") if k > n: raise ValueError("k must be lower or equal than n") if k < 0 or n < 0: raise ValueError("Expecting positive integers") return fact...
Calculate the binomial coefficient indexed by n and k. Args: n (int): positive integer k (int): positive integer Returns: The binomial coefficient indexed by n and k Raises: TypeError: If either n or k is not an integer ValueError: If either n or k is negative, or ...
737
def writeDetails(accept, readId, taxonomy, fp): fp.write( % ( if accept else , readId, .join(taxonomy) if taxonomy else ))
Write read and taxonomy details. @param accept: A C{bool} indicating whether the read was accepted, according to its taxonomy. @param readId: The C{str} id of the read. @taxonomy: A C{list} of taxonomy C{str} levels. @fp: An open file pointer to write to.
738
def user_warning(self, message, caption=): dlg = wx.MessageDialog(self, message, caption, wx.OK | wx.CANCEL | wx.ICON_WARNING) if self.show_dlg(dlg) == wx.ID_OK: continue_bool = True else: continue_bool = False dlg.Destroy()...
Shows a dialog that warns the user about some action Parameters ---------- message : message to display to user caption : title for dialog (default: "Warning!") Returns ------- continue_bool : True or False
739
def get_partitions(self, persistence=None): if persistence is None: persistence = self.persistence partitions = {} for key, items in self.base_partitions.items(): new_key = key while ( ...
Returns the partitioned data based on a specified persistence level. @ In, persistence, a floating point value specifying the size of the smallest feature we want to track. Default = None means consider all features. @ Out, a dictionary lists where each key is...
740
def _get_step_inout(step): inputs = [] outputs = [] prescatter = collections.defaultdict(list) remapped = {} assert step.outputs_record_schema["type"] == "record" output_names = set([]) for outp in step.outputs_record_schema["fields"]: outputs.append({"id": outp["name"]}) ...
Retrieve set of inputs and outputs connecting steps.
741
def set_name(address, name, anyway=False): success = idaapi.set_name(address, name, idaapi.SN_NOWARN | idaapi.SN_NOCHECK) if success: return if anyway: success = idaapi.do_name_anyway(address, name) if success: return raise exceptions.SarkSetNameFailed("Fai...
Set the name of an address. Sets the name of an address in IDA. If the name already exists, check the `anyway` parameter: True - Add `_COUNTER` to the name (default IDA behaviour) False - Raise an `exceptions.SarkErrorNameAlreadyExists` exception. Args address: The address to ren...
742
def _convert_etree_element_to_rule(entry_element): http://www.w3.org/2005/Atomapplication/xmlXYZABC rule = Rule() rule_element = entry_element.find(, _etree_sb_feed_namespaces) if rule_element is not None: filter_element = rule_element.find(, _etree_sb_feed_namespaces) if filter_element...
Converts entry element to rule object. The format of xml for rule: <entry xmlns='http://www.w3.org/2005/Atom'> <content type='application/xml'> <RuleDescription xmlns:i="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schemas.microsoft.com/netservices/2010/10/servicebus/connect"> <Filter i:typ...
743
def write_reports(self, relative_path, suite_name, reports, package_name=None): dest_path = self.reserve_file(relative_path) with open(dest_path, ) as outf: outf.write(toxml(reports, suite_name, package_name=package_name)) return dest_path
write the collection of reports to the given path
744
def get_esri_extent(esriobj): extent = None srs = None if in esriobj._json_struct: extent = esriobj._json_struct[] if in esriobj._json_struct: extent = esriobj._json_struct[] try: srs = extent[][] except KeyError, err: LOGGER.error(err, exc_info=True) ...
Get the extent of an ESRI resource
745
def query_string(context, **kwargs): params = context["request"].GET.copy() for key, value in list(kwargs.items()): params[key] = value return "?" + params.urlencode()
Add param to the given query string
746
def json_dumps(obj, **kwargs ): if six.PY2: kwargs[] = return json.dumps(convert_to_dict(obj), **kwargs)
Force use of unicode.
747
def to_dict(self): return { : self.content, : [r._asdict() for r in self.resources], : self.js_init_fn, : self.js_init_version, : self.json_init_args }
Returns the fragment in a dictionary representation.
748
def frame_paths(frame_type, start_time, end_time, server=None, url_type=): site = frame_type[0] connection = datafind_connection(server) connection.find_times(site, frame_type, gpsstart=start_time, gpsend=end_time) cache = connection.find_frame_urls(site, frame_type, start...
Return the paths to a span of frame files Parameters ---------- frame_type : string The string representation of the frame type (ex. 'H1_ER_C00_L1') start_time : int The start time that we need the frames to span. end_time : int The end time that we need the frames to span. ...
749
def get_valid_cwd(): try: cwd = _current_dir() except: warn("Your current directory is invalid. If you open a ticket at " + "https://github.com/milkbikis/powerline-shell/issues/new " + "we would love to help fix the issue.") sys.stdout.write("> ") sys...
Determine and check the current working directory for validity. Typically, an directory arises when you checkout a different branch on git that doesn't have this directory. When an invalid directory is found, a warning is printed to the screen, but the directory is still returned as-is, since this is w...
750
def remove_foreign_key(self, name): name = self._normalize_identifier(name) if not self.has_foreign_key(name): raise ForeignKeyDoesNotExist(name, self._name) del self._fk_constraints[name]
Removes the foreign key constraint with the given name. :param name: The constraint name :type name: str
751
def store_zonefiles( self, zonefile_names, zonefiles, zonefile_txids, zonefile_block_heights, peer_zonefile_hashes, peer_hostport, path, con=None ): ret = [] with AtlasDBOpen(con=con, path=path) as dbcon: for fetched_zfhash, zonefile_txt in zonefiles.items(): ...
Store a list of RPC-fetched zonefiles (but only ones in peer_zonefile_hashes) from the given peer_hostport Return the list of zonefile hashes stored.
752
def _adjust_rowcol(self, insertion_point, no_to_insert, axis, tab=None): if axis == 2: self._shift_rowcol(insertion_point, no_to_insert) return assert axis in (0, 1) cell_sizes = self.col_widths if axis else self.row_heights set_cell_size = self.set_co...
Adjusts row and column sizes on insertion/deletion
753
def stream_bloom_filters(dataset, keys, schema ): tokenizers = [tokenizer.get_tokenizer(field.hashing_properties) for field in schema.fields] return (crypto_bloom_filter(s, tokenizers, schema, keys)...
Compute composite Bloom filters (CLKs) for every record in an iterable dataset. :param dataset: An iterable of indexable records. :param schema: An instantiated Schema instance :param keys: A tuple of two lists of secret keys used in the HMAC. :return: Generator yielding bloom f...
754
def when(self, *bools): self.passes = self.passes and all(bools) return self
:type bools: bool :param bools: Boolean arguments All boolean arguments passed to this method must evaluate to `True` for printing to be enabled. So for example, the following code would print ``x: 1`` .. code-block:: python for x in range(10): Behol...
755
def lp_to_simple_rdd(lp_rdd, categorical=False, nb_classes=None): if categorical: if not nb_classes: labels = np.asarray(lp_rdd.map( lambda lp: lp.label).collect(), dtype=) nb_classes = np.max(labels) + 1 rdd = lp_rdd.map(lambda lp: (from_vector(lp.featur...
Convert a LabeledPoint RDD into an RDD of feature-label pairs :param lp_rdd: LabeledPoint RDD of features and labels :param categorical: boolean, if labels should be one-hot encode when returned :param nb_classes: int, number of total classes :return: Spark RDD with feature-label pairs
756
def tradeBreaksSSE(symbols=None, on_data=None, token=, version=): return _runSSE(, symbols, on_data, token, version)
Trade report messages are sent when an order on the IEX Order Book is executed in whole or in part. DEEP sends a Trade report message for every individual fill. https://iexcloud.io/docs/api/#deep-trades Args: symbols (string); Tickers to request on_data (function): Callback on data tok...
757
def get_assessments_taken_by_search(self, assessment_taken_query, assessment_taken_search): if not self._can(): raise PermissionDenied() return self._provider_session.get_assessments_taken_by_search(assessment_taken_query, assessment_taken_search)
Pass through to provider AssessmentTakenSearchSession.get_assessments_taken_by_search
758
def pre_save(self, model_instance, add): value = super().pre_save(model_instance, add) if isinstance(value, LocalizedValue): for file in value.__dict__.values(): if file and not file._committed: file.save(file.name, file, save=False) retur...
Returns field's value just before saving.
759
def patch_stackless(): global _application_set_schedule_callback _application_set_schedule_callback = stackless.set_schedule_callback(_schedule_callback) def set_schedule_callback(callable): global _application_set_schedule_callback old = _application_set_schedule_callback _app...
This function should be called to patch the stackless module so that new tasklets are properly tracked in the debugger.
760
def to_api_repr(self): configuration = self._configuration.to_api_repr() resource = { "jobReference": self._properties["jobReference"], "configuration": configuration, } configuration["query"]["query"] = self.query return resource
Generate a resource for :meth:`_begin`.
761
def file_compile(self, path): log.info(+path) cmd = % path res = self.__exchange(cmd) log.info(res) return res
Compiles a file specified by path on the device
762
def dim_range_key(eldim): if isinstance(eldim, dim): dim_name = repr(eldim) if dim_name.startswith(""): dim_name = dim_name[1:-1] else: dim_name = eldim.name return dim_name
Returns the key to look up a dimension range.
763
def hysteresis_magic2(path_to_file=, hyst_file="rmag_hysteresis.txt", save=False, save_folder=, fmt="svg", plots=True): user, meas_file, rmag_out, rmag_file = "", "agm_measurements.txt", "rmag_hysteresis.txt", "" pltspec = "" dir_path = save_folder verbos...
Calculates hysteresis parameters, saves them in rmag_hysteresis format file. If selected, this function also plots hysteresis loops, delta M curves, d (Delta M)/dB curves, and IRM backfield curves. Parameters (defaults are used if not specified) ---------- path_to_file : path to directory that cont...
764
def is_secret_registered( self, secrethash: SecretHash, block_identifier: BlockSpecification, ) -> bool: if not self.client.can_query_state_for_block(block_identifier): raise NoStateForBlockIdentifier() block = self.get_secret_registration_bl...
True if the secret for `secrethash` is registered at `block_identifier`. Throws NoStateForBlockIdentifier if the given block_identifier is older than the pruning limit
765
def import_surf_mesh(file_name): raw_content = read_file(file_name) raw_content = raw_content.split("\n") content = [] for rc in raw_content: temp = rc.strip().split() content.append(temp) if int(content[0][0]) != 3: raise TypeError("Input mesh must be 3-dimension...
Generates a NURBS surface object from a mesh file. :param file_name: input mesh file :type file_name: str :return: a NURBS surface :rtype: NURBS.Surface
766
def clean_whitespace(string, compact=False): for a, b in ((, ), (, ), (, ), (, ), (, )): string = string.replace(a, b) if compact: for a, b in ((, ), (, ), (, ), (, ), (, )): string = string.replace(a, b) return string.strip()
Return string with compressed whitespace.
767
def _value_and_batch_jacobian(f, x): if tf.executing_eagerly(): with tf.GradientTape() as tape: tape.watch(x) value = f(x) batch_jacobian = tape.batch_jacobian(value, x) else: value = f(x) batch_jacobian = gradients.batch_jacobian(value, x) return value, batch_jacobian
Enables uniform interface to value and batch jacobian calculation. Works in both eager and graph modes. Arguments: f: The scalar function to evaluate. x: The value at which to compute the value and the batch jacobian. Returns: A tuple (f(x), J(x)), where J(x) is the batch jacobian.
768
def create_table(table, data): fields = data[] query = indexed_fields = for key, value in fields.items(): non_case_field = value[0][0:value[0].find()] if non_case_field == : sign = value[0][value[0].find() + 1:-1:].strip() if sign == : fi...
Create table with defined name and fields :return: None
769
def authenticate_request(self, method, bucket=, key=, headers=None): path = self.conn.calling_format.build_path_base(bucket, key) auth_path = self.conn.calling_format.build_auth_path(bucket, key) http_request = boto.connection.AWSAuthConnection.build_base_http_request(...
Authenticate a HTTP request by filling in Authorization field header. :param method: HTTP method (e.g. GET, PUT, POST) :param bucket: name of the bucket. :param key: name of key within bucket. :param headers: dictionary of additional HTTP headers. :return: boto.connection.HTTPR...
770
def RegisterSourceType(cls, source_type_class): if source_type_class.TYPE_INDICATOR in cls._source_type_classes: raise KeyError( .format( source_type_class.TYPE_INDICATOR)) cls._source_type_classes[source_type_class.TYPE_INDICATOR] = ( source_type_class)
Registers a source type. Source types are identified based on their type indicator. Args: source_type_class (type): source type. Raises: KeyError: if source types is already set for the corresponding type indicator.
771
def _find_usages_vpn_gateways(self): vpngws = self.conn.describe_vpn_gateways(Filters=[ { : , : [ , ] } ])[] self.limits[]._add_current_usage( len(vpng...
find usage of vpn gateways
772
def post(self, *args, **kwargs): json_data = request.get_json() or {} relationship_field, model_relationship_field, related_type_, related_id_field = self._get_relationship_data() if not in json_data: raise BadRequest(, source={: }) if isinstance(json_data[], dict...
Add / create relationship(s)
773
def AIMAFile(components, mode=): "Open a file based at the AIMA root directory." import utils dir = os.path.dirname(utils.__file__) return open(apply(os.path.join, [dir] + components), mode)
Open a file based at the AIMA root directory.
774
def _in_header(self, col): if not self._has_header: icol_ex = pexdoc.exh.addex(RuntimeError, "Invalid column specification") hnf_ex = pexdoc.exh.addex(ValueError, "Column *[column_identifier]* not found") col_list = [col] if is...
Validate column identifier(s).
775
def create_issue(self, data, params=None): return self._post(self.API_URL + , data=data, params=params)
Creates an issue or a sub-task from a JSON representation. You can provide two parameters in request's body: update or fields. The fields, that can be set on an issue create operation, can be determined using the /rest/api/2/issue/createmeta resource. If a particular field is not configured to ...
776
def send (self, command, *args, **kwargs): status = False cmdobj = self._cmddict.create(command, *args, **kwargs) messages = [] if not cmdobj.validate(messages): for msg in messages: log.error(msg) else: encoded = cmdobj.encod...
Creates, validates, and sends the given command as a UDP packet to the destination (host, port) specified when this CmdAPI was created. Returns True if the command was created, valid, and sent, False otherwise.
777
def _all_arcs(self): arcs = set() for bp in self.child_parsers(): arcs.update(bp._arcs()) return arcs
Get the set of all arcs in this code object and its children. See `_arcs` for details.
778
def add_observee_with_credentials(self, user_id, access_token=None, observee_password=None, observee_unique_id=None): path = {} data = {} params = {} path["user_id"] = user_id if observee_unique_id is not None: ...
Add an observee with credentials. Register the given user to observe another user, given the observee's credentials. *Note:* all users are allowed to add their own observees, given the observee's credentials or access token are provided. Administrators can add observees given cred...
779
def _check_input_files(nspc, parser): if not len(nspc.filenames) == 3: parser.print_help() msg = .format(len(nspc.filenames), .join(nspc.filenames)) raise Exception(msg) for fin in nspc.filenames: if not os.path.exists(fin): return...
check filename args. otherwise if one of the 3 filenames is bad it's hard to tell which one
780
def actnorm_center(name, x, reverse=False, init=False): shape = common_layers.shape_list(x) with tf.variable_scope(name, reuse=tf.AUTO_REUSE): assert len(shape) == 2 or len(shape) == 4 if len(shape) == 2: x_mean = tf.reduce_mean(x, [0], keepdims=True) b = get_variable_ddi("b", (1, shape[1]), ...
Add a bias to x. Initialize such that the output of the first minibatch is zero centered per channel. Args: name: scope x: 2-D or 4-D Tensor. reverse: Forward or backward operation. init: data-dependent initialization. Returns: x_center: (x + b), if reverse is True and (x - b) otherwise.
781
def reset(db_name): conn = psycopg2.connect(database=) db = Database(db_name) conn.autocommit = True with conn.cursor() as cursor: cursor.execute(db.drop_statement()) cursor.execute(db.create_statement()) conn.close()
Reset database.
782
def mavlink_packet(self, m): if m.get_type() == : if not self.packet_is_for_me(m): self.dropped += 1 return if self.sender is None and m.seqno == 0: if self.log_settings.verbose: print("DFLogger: Received data ...
handle mavlink packets
783
def _dmi_cast(key, val, clean=True): if clean and not _dmi_isclean(key, val): return elif not re.match(r, key, flags=re.IGNORECASE): if in val: val = [el.strip() for el in val.split()] else: try: val = int(val) except Exception: ...
Simple caster thingy for trying to fish out at least ints & lists from strings
784
def _write_plan(self, stream): if self.plan is not None: if not self._plan_written: print("1..{0}".format(self.plan), file=stream) self._plan_written = True
Write the plan line to the stream. If we have a plan and have not yet written it out, write it to the given stream.
785
def _theorem5p4(adj, ub): new_edges = set() for u, v in itertools.combinations(adj, 2): if u in adj[v]: continue if len(adj[u].intersection(adj[v])) > ub: new_edges.add((u, v)) while new_edges: for u, v in new_edges: adj[u].add(...
By Theorem 5.4, if any two vertices have ub + 1 common neighbors then we can add an edge between them.
786
def ancestors(self, cl=None, noduplicates=True): if not cl: cl = self if cl.parents(): bag = [] for x in cl.parents(): if x.uri != cl.uri: bag += [x] + self.ancestors(x, noduplicates) else: ...
returns all ancestors in the taxonomy
787
def screenshot(path=None): if not _rootinitialized: raise TDLError() if isinstance(path, str): _lib.TCOD_sys_save_screenshot(_encodeString(path)) elif path is None: filelist = _os.listdir() n = 1 filename = % n while filename in filelist: n ...
Capture the screen and save it as a png file. If path is None then the image will be placed in the current folder with the names: ``screenshot001.png, screenshot002.png, ...`` Args: path (Optional[Text]): The file path to save the screenshot.
788
def _clear_pattern(self): self.__interval = None self.__days_of_week = set() self.__first_day_of_week = None self.__day_of_month = None self.__month = None self.__index = self.__start_date = None self.__end_date = None s...
Clears this event recurrence
789
def _is_valid_ins(self, ins_ir):
    """Check whether the REIL translation *ins_ir* may belong to a gadget.

    A sequence is rejected when any of its instructions carries control
    flow (JCC) or has undefined/unknown semantics.
    """
    banned = (
        ReilMnemonic.JCC,
        ReilMnemonic.UNDEF,
        ReilMnemonic.UNKN,
    )
    return all(instr.mnemonic not in banned for instr in ins_ir)
Check whether the instruction is valid for use in a gadget.
790
def register(self, classes=[]): if not isinstance(classes, list): raise AttributeError("plugins must be a list, not %s." % type(classes)) plugin_registered = [] for plugin_class in classes: plugin_name = plugin_class.__name__ self.register_class(plu...
Registers new plugins. The registration only creates a new entry for a plugin inside the _classes dictionary. It does not activate or even initialise the plugin. A plugin must be a class, which inherits directly or indirectly from GwBasePattern. :param classes: List of plugin classes ...
791
def delete(self):
    """Delete this photoset via the remote API; returns True on success."""
    # NOTE(review): `method` is read before it is ever assigned, and the
    # API method-name string literal is missing — it appears to have been
    # stripped during extraction. Restore the literal (the API method
    # name) before this can run.
    method = _dopost(method, auth=True, photoset_id=self.id)
    return True
Deletes the photoset.
792
def publish_avatar_set(self, avatar_set): id_ = avatar_set.png_id done = False with (yield from self._publish_lock): if (yield from self._pep.available()): yield from self._pep.publish( namespaces.xep0084_data, avatar_...
Make `avatar_set` the current avatar of the jid associated with this connection. If :attr:`synchronize_vcard` is true and PEP is available the vCard is only synchronized if the PEP update is successful. This means publishing the ``image/png`` avatar data and the avatar metadata...
793
def flush(self, stats, cs_status=None):
    """Clear and redraw the screen.

    stats: stats database to display.
    cs_status: connection-state string, e.g.
        "None": standalone or server mode
        "Connected": client is connected to the server
        "Disconnected": client is disconnected from the server
    """
    # Erase first so stale content never shows through the fresh display.
    self.erase()
    self.display(stats, cs_status=cs_status)
Clear and update the screen. stats: Stats database to display cs_status: "None": standalone or server mode "Connected": Client is connected to the server "Disconnected": Client is disconnected from the server
794
def write(self, fptr): self._validate(writing=True) num_components = len(self.association) fptr.write(struct.pack(, 8 + 2 + num_components * 6, b)) fptr.write(struct.pack(, num_components)) for j in range(num_components): fptr.write(struct.pack( + * 3, ...
Write a channel definition box to file.
795
def find_argname(self, argname, rec=False):
    """Get the index and :class:`AssignName` node for the given name.

    :param argname: name of the argument to search for
    :type argname: str
    :param rec: whether to also search arguments nested in unpacked
        tuples
    :type rec: bool
    :returns: the (index, node) pair, or (None, None) when the argument
        is not found
    """
    if not self.args:
        return None, None
    return _find_arg(argname, self.args, rec)
Get the index and :class:`AssignName` node for given name. :param argname: The name of the argument to search for. :type argname: str :param rec: Whether or not to include arguments in unpacked tuples in the search. :type rec: bool :returns: The index and node for ...
796
async def delTrigger(self, iden):
    """Delete the trigger identified by *iden* from the cortex."""
    trig = self.cell.triggers.get(iden)
    # NOTE(review): trig.get() is called with no key argument here — the
    # string literal appears to have been stripped during extraction;
    # confirm against the upstream source which value is passed to the
    # auth check (likely the trigger owner's user iden).
    self._trig_auth_check(trig.get())
    self.cell.triggers.delete(iden)
Deletes a trigger from the cortex
797
def updateDynamics(self): history_vars_string = arg_names = list(getArgNames(self.calcDynamics)) if in arg_names: arg_names.remove() for name in arg_names: history_vars_string += \ + name + update_dict = eval( + history_vars_string + )...
Calculates a new "aggregate dynamic rule" using the history of variables named in track_vars, and distributes this rule to AgentTypes in agents. Parameters ---------- none Returns ------- dynamics : instance The new "aggregate dynamic rule" that agen...
798
def from_pycbc(cls, fs, copy=True):
    """Build a `FrequencySeries` from a PyCBC frequency series.

    Parameters
    ----------
    fs : `pycbc.types.frequencyseries.FrequencySeries`
        the input PyCBC frequency series
    copy : `bool`, optional
        whether to copy the underlying data array (default: `True`)

    Returns
    -------
    FrequencySeries
        a new series built from ``fs.data``, starting at 0 Hz with
        frequency spacing ``fs.delta_f`` and epoch ``fs.epoch``
    """
    kwargs = {
        "f0": 0,
        "df": fs.delta_f,
        "epoch": fs.epoch,
        "copy": copy,
    }
    return cls(fs.data, **kwargs)
Convert a `pycbc.types.frequencyseries.FrequencySeries` into a `FrequencySeries` Parameters ---------- fs : `pycbc.types.frequencyseries.FrequencySeries` the input PyCBC `~pycbc.types.frequencyseries.FrequencySeries` array copy : `bool`, optional, defaul...
799
def _create_datadict(cls, internal_name): if internal_name == "LOCATION": return Location() if internal_name == "DESIGN CONDITIONS": return DesignConditions() if internal_name == "TYPICAL/EXTREME PERIODS": return TypicalOrExtremePeriods() if i...
Creates an object depending on `internal_name` Args: internal_name (str): IDD name Raises: ValueError: if `internal_name` cannot be matched to a data dictionary object