code (string, lengths 4-4.48k) | docstring (string, lengths 1-6.45k) | _id (string, length 24) |
|---|---|---|
def test_attr(self): <NEW_LINE> <INDENT> ev = self._ev() <NEW_LINE> self.assertRaises(AttributeError, lambda: ev.foo) <NEW_LINE> self.assertRaises(KeyError, lambda: ev['foo']) <NEW_LINE> ev.foo = 'bar' <NEW_LINE> self.assertEqual('bar', ev.foo) <NEW_LINE> self.assertEqual('bar', ev['foo']) <NEW_LINE> self.assertRaises(... | Attibutes and indexed keys are equivalent. | 625941bd6fb2d068a760ef8d |
def daterange(start, stop, steps): <NEW_LINE> <INDENT> delta = (stop - start) / steps <NEW_LINE> current = start <NEW_LINE> while current + delta <= stop: <NEW_LINE> <INDENT> yield current, current + delta <NEW_LINE> current += delta | A generator for stepping through time. | 625941bdb57a9660fec33774 |
def test_bytes_rw(self): <NEW_LINE> <INDENT> jobj = self.read_file("testBytes.ser") <NEW_LINE> pobj = javaobj.loads(jobj) <NEW_LINE> _logger.debug("Read bytes: %s", pobj) <NEW_LINE> self.assertEqual(pobj, "HelloWorld") <NEW_LINE> self._try_marshalling(jobj, pobj) | Reads testBytes.ser and checks the serialization process | 625941bd76d4e153a657ea23 |
def combine_moves(board_state_val, x, y, new_x, new_y, x2, y2, new_x2, new_y2): <NEW_LINE> <INDENT> board_state = copy.deepcopy(board_state_val) <NEW_LINE> player_val = board_state[x][y] <NEW_LINE> ai_val = board_state[x2][y2] <NEW_LINE> if new_x == new_x2 and new_y == new_y2: <NEW_LINE> <INDENT> piece_type1 = board_st... | Combines two move onto a given board state without any drawing functionality
Uses the rules of simultaneous movement in Apocalypse when combining the moves
:param board_state_val: **multi-dimensional list** Board state
:param x: **int** current x coord of the first piece to move
:param y: **int** current y coord of th... | 625941bdde87d2750b85fc82 |
def index_queryset(self, using=None): <NEW_LINE> <INDENT> return Test.objects.filter(pub_date__lte=datetime.datetime.now()) | Used when the entire index for model is updated. | 625941bd16aa5153ce36236b |
def createProfile(colorSpace, colorTemp=-1): <NEW_LINE> <INDENT> if colorSpace not in ["LAB", "XYZ", "sRGB"]: <NEW_LINE> <INDENT> raise PyCMSError("Color space not supported for on-the-fly profile creation (%s)" % colorSpace) <NEW_LINE> <DEDENT> if colorSpace == "LAB": <NEW_LINE> <INDENT> if type(colorTemp) == type(500... | ImageCms.createProfile(colorSpace, [colorTemp])
Returns a CmsProfile class object
colorSpace = string, the color space of the profile you wish to create.
Currently only "LAB", "XYZ", and "sRGB" are supported.
colorTemp = positive integer for the white point for the profile, in
degrees Kelvin (i.e. 5000, 6500,... | 625941bdb5575c28eb68def1 |
def load_model(self, verbose, dummy, ratio): <NEW_LINE> <INDENT> if self.initialized: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.verbose = verbose <NEW_LINE> if self.verbose: <NEW_LINE> <INDENT> print("Initializing keras model...") <NEW_LINE> <DEDENT> keras_graph = Graph() <NEW_LINE> with keras_graph.as_defaul... | Load the Keras Model | 625941bd8e71fb1e9831d69d |
def search(self, locationpath_string, details=False): <NEW_LINE> <INDENT> locationpath = parse(locationpath_string) <NEW_LINE> if details: <NEW_LINE> <INDENT> return [(str(x.locationpath), x.dump()) for x in self._search(locationpath)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [x.dump() for x in self._search... | Search target path and get json data list.
Args:
locationpath_string(str): XPath format search string.
details(bool): Return searched path with value,
default: False(value only).
Returns:
list: List of json data at target path (details=False).
With details True, list of set like
... | 625941bdec188e330fd5a697 |
def __init__(self, movie, ad_reel=None): <NEW_LINE> <INDENT> self.movie = movie <NEW_LINE> self.ad_reel = ad_reel | movie and ad_reel are initialised | 625941bd44b2445a33931f92 |
def database_load(name): <NEW_LINE> <INDENT> accessfile = os.path.join(os.path.dirname(os.path.abspath(__file__)), '.mdcs') <NEW_LINE> if os.path.isfile(accessfile): <NEW_LINE> <INDENT> with open(accessfile) as fp: <NEW_LINE> <INDENT> access_info = json.load(fp) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> ra... | Loads saved database system information based on the database's assigned
name. If password is not saved, a prompt will ask for it.
Parameters
----------
name : str
The name assigned to stored access information.
Returns
-------
MDCS
An MDCS object with the loaded database access information. | 625941bd460517430c39407f |
@blueprint.route('/ajax/editor/save', methods=['GET', 'POST']) <NEW_LINE> @PageView.logged <NEW_LINE> def save_post(): <NEW_LINE> <INDENT> data = request.get_json() <NEW_LINE> path = data['path'] <NEW_LINE> prefixes = current_app.config['WEB_EDITOR_PREFIXES'] <NEW_LINE> if prefixes == []: <NEW_LINE> <INDENT> raise Exce... | Save the post | 625941bda79ad161976cc038 |
def rainbow(strip, wait_ms=55, iterations=1): <NEW_LINE> <INDENT> for j in range(256*iterations): <NEW_LINE> <INDENT> for i in range(strip.count): <NEW_LINE> <INDENT> strip.setPixelColor(i, wheel((i+j) & 255)) <NEW_LINE> <DEDENT> strip.show() <NEW_LINE> time.sleep(wait_ms/1000.0) | Draw rainbow that fades across all pixels at once. | 625941bd38b623060ff0ace1 |
def get_or_create_user(self, cas_data, **overrides): <NEW_LINE> <INDENT> user_model = get_user_model() <NEW_LINE> username = cas_data['username'] <NEW_LINE> try: <NEW_LINE> <INDENT> return user_model.objects.get(username=username) <NEW_LINE> <DEDENT> except user_model.DoesNotExist: <NEW_LINE> <INDENT> pass <NEW_LINE> <... | Get user.
``cas_data`` must contain a 'username' key. If the corresponding
user already exists, it will be returned as is; if it doesn't, a
new user record will be created and returned.
.. note:: The ``CAS.auto_create_user`` setting can be set to
``False`` to disable the auto-creation of users.
``overrides... | 625941bd50812a4eaa59c217 |
def play(self): <NEW_LINE> <INDENT> open_movies_page(self.movies) | Use fresh_tomatoes.open_movies_page to generate the page, and open it in browser. | 625941bd5fcc89381b1e15b0 |
def coords2Bearing(self, coordsA, coordsB): <NEW_LINE> <INDENT> startLat = math.radians(float(coordsA[0])) <NEW_LINE> startLong = math.radians(float(coordsA[1])) <NEW_LINE> endLat = math.radians(float(coordsB[0])) <NEW_LINE> endLong = math.radians(float(coordsB[1])) <NEW_LINE> dLong = endLong - startLong <NEW_LINE> dPh... | Get a bearing given two sets of GPS coords, assuming A is the beginning coordinate in the line segment and B is the last coordinate received. Returns a floating point int. | 625941bd377c676e9127209d |
def calculate_spo2(self): <NEW_LINE> <INDENT> self.ir_ac2_sum += self.ir_ac ** 2 <NEW_LINE> self.red_ac2_sum += self.red_ac ** 2 <NEW_LINE> self.samples_recorded += 1 <NEW_LINE> if self.beats_detected == SPO2_N_BEATS: <NEW_LINE> <INDENT> self.ac_sq_ratio = 100.0 * math.log(self.red_ac2_sum/self.samples_recorded) / math... | Calculates the SPO2 value (Not sure how reliable this is)
Source: https://github.com/oxullo/Arduino-MAX30100 | 625941bd2eb69b55b151c79f |
def run_kmeans(term_freq_matrix, num_clusters, dist_metric, term_cond='centroids', num_iter=None): <NEW_LINE> <INDENT> centroids = term_freq_matrix[ ( numpy.random.choice(term_freq_matrix.shape[0], num_clusters, False) ), :] <NEW_LINE> iteration = 0 <NEW_LINE> terminate = False <NEW_LINE> assigned_clusters = None <NEW_... | Performs k means clustering on the term frequency matrix
:param csr_matrix term_freq_matrix: the term frequency matrix
:param int num_clusters: the number of article clusters
:param str dist_metric: the distance metric to use (`euclidean`,
`cosine`, or `jaccard`)
:param str term_cond: the termination condition (`c... | 625941bd6fece00bbac2d62f |
def __ne__(self, other: 'ConfigCACors') -> bool: <NEW_LINE> <INDENT> return not self == other | Return `true` when self and other are not equal, false otherwise. | 625941bd287bf620b61d395a |
def on_ffmpeg(self, event): <NEW_LINE> <INDENT> with wx.FileDialog(self, _("Choose the {} " "executable").format(self.ffmpeg), "", "", f"ffmpeg binary (*{self.ffmpeg})|*{self.ffmpeg}| " f"All files (*.*)|*.*", wx.FD_OPEN | wx.FD_FILE_MUST_EXIST) as dlgfile: <NEW_LINE> <INDENT> if dlgfile.ShowModal() == wx.ID_OK: <NEW_L... | Open filedialog to locate ffmpeg executable | 625941bd498bea3a759b99a3 |
def key_pressed(self, key: str): <NEW_LINE> <INDENT> super().key_pressed(key) <NEW_LINE> if self.stage == Stage.character_selection: <NEW_LINE> <INDENT> if key == "KEY_RIGHT": <NEW_LINE> <INDENT> self.monsters[0].set_selected(False) <NEW_LINE> self.rotate_monsters(1) <NEW_LINE> self.monsters[0].set_selected(True) <NEW_... | If we're in character selection, check move selection right or left based on user input.
If we're in restart section, move selection in the restart dialog.
:param key: Key that the user pressed.
:return: None | 625941bd7c178a314d6ef34d |
def test_successful(self): <NEW_LINE> <INDENT> url = '/%s/jobs/' % self.api <NEW_LINE> response = self.client.generic('GET', url) <NEW_LINE> self.assertEqual(response.status_code, status.HTTP_200_OK, response.content) <NEW_LINE> result = json.loads(response.content) <NEW_LINE> self.assertEqual(len(result['results']), 3... | Tests successfully calling the jobs view. | 625941bdfff4ab517eb2f32d |
def set_scint_fraction(self, isotope, fraction): <NEW_LINE> <INDENT> if isotope in self._fractions: <NEW_LINE> <INDENT> self._fractions[isotope] = fraction <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise("Isotope not part of the Te set.") | Set the fraction of an isotope in the scintillator. | 625941bd9c8ee82313fbb668 |
@pytest.mark.skipif(sys.version_info[0] < 3, reason="Requires: Python 3+") <NEW_LINE> def test_dictionary_nested(): <NEW_LINE> <INDENT> encoded = bencode({'foo': 42, 'bar': {'sketch': 'parrot', 'foobar': 23}}) <NEW_LINE> assert encoded == 'd3:bard6:foobari23e6:sketch6:parrote3:fooi42ee'.encode('utf-8') | Test the handling of nested dictionaries. | 625941bd3317a56b86939b59 |
def remove_chroms(inbam, outbam, rmchroms, log): <NEW_LINE> <INDENT> treatment = AlignmentFile(inbam, 'rb') <NEW_LINE> header = treatment.header <NEW_LINE> new_chroms = [] <NEW_LINE> chrnames = [] <NEW_LINE> tid_map = [-1 for i in range(len(header['SQ']))] <NEW_LINE> N = 0 <NEW_LINE> chr_to_remove_reason = {} <NEW_LINE... | This function takes a bam-file and outputs
a bam-file in which the specified chromosomes
have been removed.
The function searches for matching chromosomes
using regular expressions.
For example, rmchroms=['chrM', '_random']
would remove 'chrM' as well as all random chromsomes.
E.g. chr1_KI270706v1_random. | 625941bd45492302aab5e1b4 |
def getX(self): <NEW_LINE> <INDENT> return self.x | Returns the x-coordinate of Alien | 625941bd5fc7496912cc3871 |
def get_file_list(dir): <NEW_LINE> <INDENT> rtn_list = [] <NEW_LINE> num_files = sum( (len(file_list) for _, _, file_list in os.walk(dir)) ) <NEW_LINE> logger.info('Scanning directory %s' % dir) <NEW_LINE> logger.info('Files to scan: %d' % num_files) <NEW_LINE> n = 0 <NEW_LINE> for dir_name, subdir_list, file_list in o... | Scan directory tree and generate a list of files. File information
is saved in objects of class FileInfo. | 625941bd0a50d4780f666d83 |
def _validate_predefined_entity_value(self, entity, entity_name, allowed_property_map, required_properties): <NEW_LINE> <INDENT> _assert_condition(not entity.HasField('key'), 'The %s entity has a key.' % entity_name) <NEW_LINE> property_map = {} <NEW_LINE> for prop in entity.property: <NEW_LINE> <INDENT> property_name ... | Validates a predefined entity (e.g. a user or a point).
Args:
entity: the predefined entity (an entity_v4_pb.Entity)
entity_name: the name of the entity (used in error messages)
allowed_property_map: a dict whose keys are property names allowed in
the entity and values are the expected types of these prope... | 625941bd63f4b57ef0001013 |
def reverseList(self, head): <NEW_LINE> <INDENT> if head == None or head.next == None: <NEW_LINE> <INDENT> return head <NEW_LINE> <DEDENT> node = self.reverseList(head.next) <NEW_LINE> head.next.next = head <NEW_LINE> head.next = None <NEW_LINE> return node | :type head: ListNode
:rtype: ListNode | 625941bd31939e2706e4cd61 |
def error_408(message = None): <NEW_LINE> <INDENT> if not isinstance(message, str) or len(message) < 1: <NEW_LINE> <INDENT> message = 'Request Timeout' <NEW_LINE> <DEDENT> return _error(code = 408, message = message) | JSON error handler for HTTP error 408 | 625941bdd164cc6175782c41 |
@should_show.command("fetch-data") <NEW_LINE> @click.option("--db-url", envvar="PGDATABASE") <NEW_LINE> @click.argument("filename", type=click.File("r")) <NEW_LINE> @click.argument("output", type=click.File("w")) <NEW_LINE> def fetch_training_data(filename, output, db_url=None): <NEW_LINE> <INDENT> r2dt.write_training_... | This builds a CSV file of training data to use for the model building. I
keep it separate so I can build a training csv and play with it interactivly
before committing the final modeling building logic to the pipeline. | 625941bd07f4c71912b1137a |
def test_add_needs_release(self): <NEW_LINE> <INDENT> bug = Bug("1", "kanban", MEDIUM, FIX_COMMITTED, "A title", merge_proposal="url", merge_proposal_status=MERGED, tags=["verified"]) <NEW_LINE> kanban_board = self.create_test_class() <NEW_LINE> kanban_board.add(bug) <NEW_LINE> self.assertEqual([bug], kanban_board.bugs... | A L{Bug} in the 'Needs release' category is stored in the
L{BugCollectionMixin.bugs} and the L{Story.needs_release} lists, in
the default story. | 625941bdff9c53063f47c0e8 |
def __init__(self, timestamp, fel, warehouse, name=""): <NEW_LINE> <INDENT> self.timestamp = timestamp <NEW_LINE> self.fel = fel <NEW_LINE> self.warehouse = warehouse <NEW_LINE> self.name = name | Initializes event object
:param event_data: data specific to the event instance
:param event_handler: a method to handle the event, takes event_data as a paramter | 625941bd097d151d1a222d4f |
def driverHandle(self): <NEW_LINE> <INDENT> return QVariant() | QVariant Solid.AudioInterface.driverHandle() | 625941bd50812a4eaa59c218 |
def reset_session(self): <NEW_LINE> <INDENT> if not self.is_open(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if self._active_result is not None: <NEW_LINE> <INDENT> self._active_result.fetch_all() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.protocol.send_reset() <NEW_LINE> <DEDENT> except (InterfaceError, O... | Reset a sucessfully authenticated session. | 625941bd0fa83653e4656eb0 |
def count_attribute_set(self, attr): <NEW_LINE> <INDENT> attr_present = 0 <NEW_LINE> for item in self.data: <NEW_LINE> <INDENT> if attr in item: <NEW_LINE> <INDENT> if item[attr]['values']: <NEW_LINE> <INDENT> attr_present += 1 <NEW_LINE> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.unset[attr] += 1 <NEW... | count the number of items in the data
where the attribute `attr` is set
:param attr: str identifying a property | 625941bd8da39b475bd64e64 |
def test_explicitly_set_in_schema_subqueryload(self): <NEW_LINE> <INDENT> u1, u2, u3, p11, p12, p21, p22, cm11, cm12, cm21, cm22, cm_empty = self._seed() <NEW_LINE> self.query_count = 0 <NEW_LINE> res = Comment.smart_query( filters=dict(post___public=True, post___user___name__like='Bi%'), schema={ 'post': { ... | here we explicitly set in schema that we additionally want to load
post___comments | 625941bdf9cc0f698b1404f1 |
@cbook.deprecated("3.2") <NEW_LINE> def mx2num(mxdates): <NEW_LINE> <INDENT> scalar = False <NEW_LINE> if not np.iterable(mxdates): <NEW_LINE> <INDENT> scalar = True <NEW_LINE> mxdates = [mxdates] <NEW_LINE> <DEDENT> ret = epoch2num([m.ticks() for m in mxdates]) <NEW_LINE> if scalar: <NEW_LINE> <INDENT> return ret[0] <... | Convert mx :class:`datetime` instance (or sequence of mx
instances) to the new date format. | 625941bda05bb46b383ec718 |
def set_working_volume(self, mount: Mount, tip_volume: int) -> None: <NEW_LINE> <INDENT> ... | Inform the hardware how much volume a pipette can aspirate.
This will set the limit of aspiration for the pipette, and is
necessary for backcompatibility. | 625941bd82261d6c526ab38f |
def test_write_only(self): <NEW_LINE> <INDENT> name = self.mktemp() <NEW_LINE> f = File(name, 'w') <NEW_LINE> f.close() <NEW_LINE> with self.assertRaises(ValueError): <NEW_LINE> <INDENT> f = h5py.File(name, 'r', userblock_size=512) <NEW_LINE> <DEDENT> with self.assertRaises(ValueError): <NEW_LINE> <INDENT> f = h5py.Fil... | User block only allowed for write | 625941bd3346ee7daa2b2c5d |
def get_env(self): <NEW_LINE> <INDENT> env = {} <NEW_LINE> env['SSH_AUTH_SOCK'] = self._get_filename() <NEW_LINE> return env | Helper for the environnement under unix
:return:
a dict containing the ``SSH_AUTH_SOCK`` environnement variables | 625941bd236d856c2ad446cd |
def populateBbox(self, margin=0.1): <NEW_LINE> <INDENT> glist = self.getxmlelement('geolocationGrid/geolocationGridPointList') <NEW_LINE> lat = [] <NEW_LINE> lon = [] <NEW_LINE> for child in glist: <NEW_LINE> <INDENT> lat.append( float(child.find('latitude').text)) <NEW_LINE> lon.append( float(child.find('longitude').t... | Populate the bounding box from metadata. | 625941bd283ffb24f3c557fe |
def connect_handler(data): <NEW_LINE> <INDENT> post_data = { 'username': username, 'password': password, 'connection_info': data} <NEW_LINE> resp = requests.post(GOALFEED_AUTH_ENDPOINT, post_data, timeout=30).json() <NEW_LINE> channel = pusher.subscribe('private-goals', resp['auth']) <NEW_LINE> channel.bind('goal', goa... | Handle connection. | 625941bd2ae34c7f2600d025 |
def handle(req): <NEW_LINE> <INDENT> if req=='train\n': <NEW_LINE> <INDENT> train() <NEW_LINE> print('Modelo entrenado') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> binary = os.fsencode(req) <NEW_LINE> image_64_decode = base64.decodebytes(binary) <NEW_LINE> result_file = 'image' <NEW_LINE> with open(result_file, 'wb'... | handle a request to the function
Args:
req (str): request body | 625941bd1d351010ab855a10 |
@given('two positive integer values 0 and 600') <NEW_LINE> def given1(context): <NEW_LINE> <INDENT> context.range_int = (0, 600) | Given two positive integer values 0 and 600'). | 625941bdac7a0e7691ed3fcc |
def train_model_validation(filename_train_validation_set, filename_labels_train_validation_set, filter_density, dropout, input_shape, output_shape, file_path_model, filename_log, channel=1): <NEW_LINE> <INDENT> filenames_train, Y_train, filenames_validation, Y_validation, filenames_features, Y_train_validation = ... | train model with validation | 625941bd099cdd3c635f0b50 |
def repo(): <NEW_LINE> <INDENT> if 'SUDO_USER' in os.environ: <NEW_LINE> <INDENT> return os.path.expanduser('~{0}/.blueprints.git'. format(os.environ['SUDO_USER'])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return os.path.expanduser('~/.blueprints.git') | Return the full path to the Git repository. | 625941bd32920d7e50b280c1 |
def get_base_upgrade_order(packages): <NEW_LINE> <INDENT> upgrade_order = pisilinux.operations.upgrade.upgrade_base <NEW_LINE> order = upgrade_order(packages) <NEW_LINE> return list(order) | Return a list of packages of the system.base component that needs to be upgraded
or installed in install order -> list_of_strings
All the packages of the system.base component must be installed on the system
@param packages: list of package names -> list_of_strings | 625941bd56ac1b37e62640c8 |
def copy_from(self, other): <NEW_LINE> <INDENT> if (self.parent is None) != (other.parent is None): <NEW_LINE> <INDENT> raise ValueError('cannot copy scopes of different structures') <NEW_LINE> <DEDENT> if other.parent is not None: <NEW_LINE> <INDENT> self.parent.copy_from(other.parent) <NEW_LINE> <DEDENT> self.isolate... | Recursively copies the contents of this scope from another scope. | 625941bdd4950a0f3b08c245 |
def checkDirs(self): <NEW_LINE> <INDENT> for folder in [MCBUP_LOGDIR,MCBUP_MASTER_STORE,MCBUP_WORKING_STORE]: <NEW_LINE> <INDENT> if not os.path.exists(folder): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.makedirs(folder) <NEW_LINE> <DEDENT> except OSError as e: <NEW_LINE> <INDENT> print("Unable to create logging d... | Checks if required directory structure exists, and if not - creates. | 625941bdc432627299f04b37 |
def create_nite(data): <NEW_LINE> <INDENT> col = 'caldat' <NEW_LINE> if not len(data): return np.array([],dtype='S8') <NEW_LINE> dtype ='S%i'%(len(max(data[col], key=len))) <NEW_LINE> nite = data[col].values.astype(dtype) <NEW_LINE> nite = np.char.replace(nite,'-','') <NEW_LINE> return nite | Convert 'caldat' to 'nite'. This is the faster option since
it relies on NOAO to calculate the nite. | 625941bdc4546d3d9de72925 |
def shoot(self, **kwargs): <NEW_LINE> <INDENT> self._validate_shoot_args() <NEW_LINE> options = self._lua.globals.util.serialize( self._lua.table(**self._parse_shoot_args(**kwargs))) <NEW_LINE> if not kwargs.get('stream', True): <NEW_LINE> <INDENT> return self._shoot_nonstreaming( options, wait=kwargs.get('wait', True)... | Shoot a picture
For all arguments where `None` is a legal type, it signifies that the
current value from the camera should be used and not be overriden.
:param shutter_speed: Shutter speed in APEX96 (default: None)
:type shutter_speed: int/float/None
:param real_iso: Canon 'real' ISO (default: None)
:type... | 625941bda4f1c619b28aff33 |
def mirror_targets(self, mirrorlist): <NEW_LINE> <INDENT> if not type(mirrorlist) is list: <NEW_LINE> <INDENT> mirrorlist = [mirrorlist] <NEW_LINE> <DEDENT> elif len(mirrorlist) > 3: <NEW_LINE> <INDENT> raise AttributeError("Mirror list has greater than 3 items") <NEW_LINE> <DEDENT> self['mirror_targets'] = mirrorlist ... | Will set/update the mirror target list of the object.
Arguments:
mirror_targets -- a list of IPs that can have up to 3 items | 625941bd55399d3f055885a7 |
def push(self, node: SearchNode, placed_by: SearchNode = None) -> bool: <NEW_LINE> <INDENT> if node.name in self.visited: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> cost = 0.0 <NEW_LINE> if placed_by: <NEW_LINE> <INDENT> pcost = self.path_cost(placed_by) <NEW_LINE> wcost = placed_by.weight(node.name) <NEW_LIN... | Push node onto priority queue
:param node:
:param placed_by:
:return: | 625941bd30bbd722463cbcb7 |
def connect(self): <NEW_LINE> <INDENT> self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) <NEW_LINE> server_address = (self.host, self.port) <NEW_LINE> try: <NEW_LINE> <INDENT> self.sock.settimeout(self.connect_timeout) <NEW_LINE> self.sock.connect(server_address) <NEW_LINE> <DEDENT> except socket.error: <NE... | Connect and authenticate to the server. | 625941bd6fb2d068a760ef8e |
def insert(self, key, val): <NEW_LINE> <INDENT> if key in self._dict: <NEW_LINE> <INDENT> self._dict[key] = val <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._dict[key] = val | :type key: str
:type val: int
:rtype: void | 625941bda8370b7717052794 |
def sum_them_all(number): <NEW_LINE> <INDENT> pass | 2. Given a number, return the sum of all digits from
zero to the number (inclusive). | 625941bdb7558d58953c4e0d |
def minPathSum(self, grid: List[List[int]]) -> int: <NEW_LINE> <INDENT> m, n = len(grid), len(grid[0]) <NEW_LINE> dp = [[None]*n for _ in range(m)] <NEW_LINE> for i in range(m): <NEW_LINE> <INDENT> for j in range(n): <NEW_LINE> <INDENT> if i==0 and j==0: dp[i][j]=grid[i][j] <NEW_LINE> else: <NEW_LINE> <INDENT> dp[i][j]... | Aim in this question being, to find shortest path.
We need to break it down and see which is shorter. Whether the left one or top one.
And we can conclude on our answer. | 625941bd7b180e01f3dc46f7 |
def set_memory_limit(self, memory): <NEW_LINE> <INDENT> self._validate_memory_string(memory) <NEW_LINE> self.memory_limit = memory | Set memory limit (maximum) for this operator.
Args:
memory: a string which can be a number or a number followed by one of
"E", "P", "T", "G", "M", "K". | 625941bde1aae11d1e749ba9 |
def __init__(self, plantype): <NEW_LINE> <INDENT> self.plantype = plantype | Constructor | 625941bd0c0af96317bb80dc |
@with_setup(pretest, posttest) <NEW_LINE> @retry_on_except() <NEW_LINE> def test_iter_overhead_hard(): <NEW_LINE> <INDENT> total = int(1e5) <NEW_LINE> with closing(MockIO()) as our_file: <NEW_LINE> <INDENT> a = 0 <NEW_LINE> with trange(total, file=our_file, leave=True, miniters=1, mininterval=0, maxinterval=0) as t: <N... | Test overhead of iteration based tqdm (hard) | 625941bdf8510a7c17cf95ef |
def colorPicker(self, event=None): <NEW_LINE> <INDENT> dlg = PsychoColorPicker(None) <NEW_LINE> dlg.ShowModal() <NEW_LINE> dlg.Destroy() <NEW_LINE> if event is not None: <NEW_LINE> <INDENT> event.Skip() | Open color-picker, sets clip-board to string [r,g,b].
Note: units are psychopy -1..+1 rgb units to three decimal places,
preserving 24-bit color. | 625941bd6aa9bd52df036c97 |
def knoepfe_menü(user): <NEW_LINE> <INDENT> alle = { 'index': ('/', 'Startseite'), 'olymp': (reverse('Wettbewerbe:index'), 'Wettbewerbe'), 'ehemalige': (reverse('Ehemalige:index'), 'Ehemalige'), 'impressum': (reverse('impressum'), 'Impressum'), 'db': ('https://olymp.piokg.de/static/db.pdf', 'Datenbanklayout'), 'todo': ... | gibt Knöpfe für Menüleiste als Liste von Tupeln zurück | 625941bdd10714528d5ffbd4 |
def _get_permutations_draw(draw, len): <NEW_LINE> <INDENT> result = itertools.permutations(draw, len) <NEW_LINE> return list(result) | Helper to get all permutations of a draw (list of letters), hint:
use itertools.permutations (order of letters matters) | 625941bd5fc7496912cc3872 |
def draw_step_and_sigmoid(): <NEW_LINE> <INDENT> draw_function(f=lambda x: 1 / (1 + np.exp(-x)), save=False) <NEW_LINE> draw_function(f=lambda x: np.abs(x) / (2 * x) + 1 / 2, save=True, name="step and sigmoid", clear_plot=False) | Plot a smooth curve of the sigmoid and step function on the same figure. | 625941bd187af65679ca5012 |
def get_oci_account(name, user_id): <NEW_LINE> <INDENT> response = handle_request( current_app.config['DATABASE_API_URL'], 'oci_accounts/', 'get', job_data={'name': name, 'user_id': user_id} ) <NEW_LINE> account = response.json() <NEW_LINE> if not account: <NEW_LINE> <INDENT> raise MashException( 'OCI account {account}... | Get OCI account for given user. | 625941bd9f2886367277a784 |
def wells_from(self, start, num, columnwise=False): <NEW_LINE> <INDENT> start = self.robotize(start) <NEW_LINE> if columnwise: <NEW_LINE> <INDENT> row, col = self.decompose(start) <NEW_LINE> num_rows = self.container_type.row_count() <NEW_LINE> start = col * num_rows + row <NEW_LINE> <DEDENT> return WellGroup(self.all_... | Return a WellGroup of Wells belonging to this Container starting from
the index indicated (in integer or string form) and including the number
of proceeding wells specified. Wells are counted from the starting well
rowwise unless columnwise is True.
Parameters
----------
start : Well, int, str
Starting well speci... | 625941bd3d592f4c4ed1cf6a |
def binary_erosion(x, radius=3): <NEW_LINE> <INDENT> mask = disk(radius) <NEW_LINE> x = _binary_erosion(x, selem=mask) <NEW_LINE> return x | Return binary morphological erosion of an image,
see `skimage.morphology.binary_erosion <http://scikit-image.org/docs/dev/api/skimage.morphology.html#skimage.morphology.binary_erosion>`__.
Parameters
-----------
x : 2D array
A binary image.
radius : int
For the radius of mask.
Returns
-------
numpy.array
... | 625941bdeab8aa0e5d26da52 |
def test_token_null(self): <NEW_LINE> <INDENT> payload = {"token":'', 'oldPassword': '123456', 'newPassword': '654321'} <NEW_LINE> r2 = self.s.post(self.base_url, data=payload) <NEW_LINE> self.result = r2.json() <NEW_LINE> self.assertEqual(self.result['result'], False) <NEW_LINE> pwd = test_data.ua_emp_search(value="PA... | 空的token | 625941bd85dfad0860c3ad4e |
def set_thread_priority(self, *args, **kwargs): <NEW_LINE> <INDENT> return _howto_swig.chan_info_parser_sptr_set_thread_priority(self, *args, **kwargs) | set_thread_priority(chan_info_parser_sptr self, int priority) -> int | 625941bd66673b3332b91f85 |
def p_multelsif(p): <NEW_LINE> <INDENT> getRule(p,'multelsif') | multelsif : elsif expr pthen compstmt multelsif
| empty | 625941bd293b9510aa2c318d |
def extract_data(self, GT_mask, s_idx): <NEW_LINE> <INDENT> data = np.array([]).reshape((-1, self.cn + 2)) <NEW_LINE> for i in range(1, self.cn + 1): <NEW_LINE> <INDENT> mask = (GT_mask == i) <NEW_LINE> idx = np.where(mask.flatten())[0].reshape((-1, 1)) <NEW_LINE> T1 = self.T1[s_idx, :, :][mask].reshape((-1, 1)) <NEW_L... | EXTRACT_DATA
On the basis of given indices of slices, extract
point value from T1, T2 and PD to for feature matrix.
Each point has 5 dimensions of information:
index (position in slice), T1 value, T2 value, PD value,
label (1 for CSF, 2 for GM, 3 for WM). | 625941bd71ff763f4b54957b |
def compare_yv(yv, control_yv, mode, is_control=False): <NEW_LINE> <INDENT> y, y_var = yv <NEW_LINE> if mode == 'ratio': <NEW_LINE> <INDENT> if is_control: <NEW_LINE> <INDENT> return np.ones_like(y), np.zeros_like(y_var) <NEW_LINE> <DEDENT> control_y, control_y_var = (1, 0) if control_yv is None else control_yv <NEW_LI... | :return: c, c_var | 625941bd925a0f43d2549d68 |
def load_latest_checkpoint_from_bucket(tensorboard_run, bucket, train_dir): <NEW_LINE> <INDENT> import numpy as np <NEW_LINE> checkpoints = gsutil_ls(bucket, filter=tensorboard_run) <NEW_LINE> if "BucketNotFoundException" in checkpoints: <NEW_LINE> <INDENT> raise ValueError( "ERROR: bucket not found, path={}".format(bu... | find latest zipped 'checkpoint' in bucket and download
similar to tf.train.latest_checkpoint()
Args:
tensorboard_run: filter for zip files from the same run
e.g. "y-tensorboard-run" for "my-tensorboard-run.6000.zip"
bucket: "gs://[bucket]"
train_dir: a diretory path to restore the checkpoint files,
... | 625941bd1d351010ab855a11 |
def populateMethods(self): <NEW_LINE> <INDENT> self.retrieval_models = {} <NEW_LINE> all_doc_methods = None <NEW_LINE> if self.exp.get("doc_methods", None): <NEW_LINE> <INDENT> all_doc_methods = getDictOfTestingMethods(self.exp["doc_methods"]) <NEW_LINE> if self.exp["full_corpus"]: <NEW_LINE> <INDENT> all_files = ["ALL... | Fills dict with all the test methods, parameters and options, including
the retrieval instances | 625941bd7d43ff24873a2b92 |
def test_url_remote_http(self): <NEW_LINE> <INDENT> url = 'http://somewhere' <NEW_LINE> is_remote = is_remote_url(url) <NEW_LINE> self.assertTrue(is_remote) | verify that a remote http url is identified. | 625941bdbe7bc26dc91cd4f9 |
def __init__(self): <NEW_LINE> <INDENT> self.GatewayId = None | :param GatewayId: 网关实例ID,目前我们支持的网关实例类型有,
专线网关实例ID,形如,`dcg-ltjahce6`;
Nat网关实例ID,形如,`nat-ltjahce6`;
VPN网关实例ID,形如,`vpn-ltjahce6`。
:type GatewayId: str
| 625941bd8a349b6b435e8068 |
def test_stage_single_bundle(self): <NEW_LINE> <INDENT> bundle = self.create_run_bundle( state=State.STAGED, metadata=dict(request_memory="0", request_time="", request_cpus=1, request_gpus=0), ) <NEW_LINE> self.save_bundle(bundle) <NEW_LINE> self.mock_worker_checkin(cpus=1, user_id=self.user_id) <NEW_LINE> self.bundle_... | When a worker with the right specs is available, a bundle should be staged. | 625941bd21a7993f00bc7bdf |
def _update(self, data): <NEW_LINE> <INDENT> for k, v in data.iteritems(): <NEW_LINE> <INDENT> new_value = v <NEW_LINE> if isinstance(v, dict): <NEW_LINE> <INDENT> new_value = type(self)(v) <NEW_LINE> <DEDENT> elif isinstance(v, list): <NEW_LINE> <INDENT> new_value = [(type(self)(e) if isinstance(e, dict) else e) for e... | Update the object with new data. | 625941bd21a7993f00bc7be0 |
def submit_gradient(self, from_addr, model_id, grad): <NEW_LINE> <INDENT> ipfs_address = self.ipfs.store(grad) <NEW_LINE> self.get_transaction(from_addr).addGradient( model_id, IPFSAddress().to_ethereum(ipfs_address)) <NEW_LINE> return self.call.getNumGradientsforModel(model_id) - 1 | This accepts gradients for a model from syft.nn and uploads them to
the blockchain (via IPFS), linked to a model by it's id.
TODO: modify syft.nn to actually have a "getGradients()" method call so
that there can be checks that keep people from uploading junk.
Currently any python object could be uploaded (which is obv... | 625941bd29b78933be1e55a5 |
def function(self, data): <NEW_LINE> <INDENT> deg = sp.diags(a1ifmat(data.sum(1))**-.5, 0) <NEW_LINE> lap = deg @ data @ deg <NEW_LINE> return lap | Normalized Symmetric Graph Laplacian
Parameters
----------
data : :obj:`sp.csr_matrix` or :obj:`np.ndarray`
Graph affinity/similarity matrix.
Returns
-------
:obj:`sp.csr_matrix`
Sparse representation of a symmetric graph laplacian matrix | 625941bd091ae35668666e58 |
def print_warning(content): <NEW_LINE> <INDENT> print(COLOR_YELLOW_FORMAT % (WARNING_INFO % content)) | Print warning information to screen | 625941bd4c3428357757c21f |
def part(self, channel): <NEW_LINE> <INDENT> self.irc.send(self.encode('PART {0}'.format(channel))) | Leaves channel | 625941bd24f1403a92600a5e |
@register.inclusion_tag('cclikes/inclusion_tags/cclikes_extender.html', takes_context=True) <NEW_LINE> def likes(context, obj, template=None): <NEW_LINE> <INDENT> if template is None: <NEW_LINE> <INDENT> template = 'cclikes/inclusion_tags/cclikes.html' <NEW_LINE> <DEDENT> request = context['request'] <NEW_LINE> import_... | Register a callable as an inclusion tag:
@register.inclusion_tag('results.html')
def show_results(poll):
choices = poll.choice_set.all()
return {'choices': choices} | 625941bd94891a1f4081b99d |
def start_interpreter(self, namespace): <NEW_LINE> <INDENT> self.clear() <NEW_LINE> if self.interpreter is not None: <NEW_LINE> <INDENT> self.interpreter.closing() <NEW_LINE> <DEDENT> self.interpreter = Interpreter( namespace, self.exitfunc, SysOutput, WidgetProxy, self.debug ) <NEW_LINE> self.interpreter.stdout_write.... | Start Python interpreter | 625941bd796e427e537b04b8 |
def issue_date(self): <NEW_LINE> <INDENT> pattern = r'^\s*dated as of (.*)\s*$' <NEW_LINE> m = re.search(pattern, self.content, flags= re.IGNORECASE|re.MULTILINE) <NEW_LINE> if m: <NEW_LINE> <INDENT> print('date string', m.group(1)) <NEW_LINE> return dateparser.parse(m.group(1)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <IN... | DATED AS OF SEPTEMBER 13, 1994 | 625941bd656771135c3eb761 |
def _create_event_ch(self, events, n_samples=None): <NEW_LINE> <INDENT> n_dropped = len(events[:, 0]) - len(set(events[:, 0])) <NEW_LINE> if n_dropped > 0: <NEW_LINE> <INDENT> warn(str(n_dropped) + " events will be dropped because they " "occur on the same time sample as another event. " "`mne.io.Raw` objects store eve... | Create the event channel. | 625941bd6e29344779a62509 |
def map_(f): <NEW_LINE> <INDENT> return lambda xs: list(map(f, xs)) | The list obtained by applying f
to each element of xs. | 625941bd29b78933be1e55a6 |
def user_input(connection: connect_server.GameConnection, game_state: connectfour.GameState) -> connectfour.GameState: <NEW_LINE> <INDENT> user_command = input() <NEW_LINE> while c4_shared_function.game_move(game_state, user_command) is None: <NEW_LINE> <INDENT> user_command = input() <NEW_LINE> <DEDENT> connect_server... | Asks for the user command. If user command is invalid, no changes will be done on the game state.
If valid, sends the user command to the server, updates the game state, and prints out the
updated game board. User will be promtp continuously until command is valid. | 625941bdde87d2750b85fc84 |
def parseIntegratedTestScriptCommands(source_path, keywords): <NEW_LINE> <INDENT> keywords_re = re.compile( to_bytes("(%s)(.*)\n" % ("|".join(re.escape(k) for k in keywords),))) <NEW_LINE> f = open(source_path, 'rb') <NEW_LINE> try: <NEW_LINE> <INDENT> data = f.read() <NEW_LINE> if not data.endswith(to_bytes('\n')): <N... | parseIntegratedTestScriptCommands(source_path) -> commands
Parse the commands in an integrated test script file into a list of
(line_number, command_type, line). | 625941bd1f037a2d8b9460f3 |
def part1() -> int: <NEW_LINE> <INDENT> coords = list(all_coords) <NEW_LINE> def calculate_areas( coords: List[Tuple[int, int]], min_x: int, min_y: int, max_x: int, max_y: int) -> Dict[Tuple[int, int], int]: <NEW_LINE> <INDENT> coords_and_area = {c: 0 for c in coords} <NEW_LINE> for x in range(min_x, max_x): <NEW_LINE>... | Using only the Manhattan distance, determine the area around each
coordinate by counting the number of integer X,Y locations that are closest
to that coordinate (and aren't tied in distance to any other coordinate).
What is the size of the largest area that isn't infinite? | 625941bd7047854f462a1301 |
def _VerifyValues(self, tensor_in_sizes, filter_in_sizes, stride, padding, expected): <NEW_LINE> <INDENT> total_size_1 = 1 <NEW_LINE> total_size_2 = 1 <NEW_LINE> for s in tensor_in_sizes: <NEW_LINE> <INDENT> total_size_1 *= s <NEW_LINE> <DEDENT> for s in filter_in_sizes: <NEW_LINE> <INDENT> total_size_2 *= s <NEW_LINE>... | Verifies the output values of the convolution function.
Args:
tensor_in_sizes: Input tensor dimensions in
[batch, input_rows, input_cols, input_depth].
filter_in_sizes: Filter tensor dimensions in
[kernel_rows, kernel_cols, input_depth, output_depth].
stride: Stride.
padding: Padding type.
expected: ... | 625941bdfbf16365ca6f60b3 |
def _test_delete_subnet_with_ports(self, mode): <NEW_LINE> <INDENT> slaac_network = self.create_network() <NEW_LINE> subnet_slaac = self.create_subnet(slaac_network, **{'ipv6_ra_mode': mode, 'ipv6_address_mode': mode}) <NEW_LINE> port = self.create_port(slaac_network) <NEW_LINE> self.assertIsNotNone(port['fixed_ips'][0... | Create subnet and delete it with existing ports | 625941bd63d6d428bbe443e4 |
def max(self) -> Key: <NEW_LINE> <INDENT> if self.is_empty(): <NEW_LINE> <INDENT> raise NoSuchElementException("Priority queue underflow") <NEW_LINE> <DEDENT> assert self._pq[1] is not None <NEW_LINE> return self._pq[1] | Returns a largest key on this priority queue.
:return: a largest key on the priority queue
:raises NoSuchElementException: if this priority queue is empty | 625941bd4e696a04525c9341 |
def find_operands(entity_list, lemma): <NEW_LINE> <INDENT> if lemma.name in all_axioms.keys(): <NEW_LINE> <INDENT> operands = lemma.infer_operands(entity_list=entity_list) <NEW_LINE> if operands is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> elif operands is False: <NEW_LINE> <INDENT> perms = list(itertoo... | Given the lemma and the entity, find operands that can be applied with the given lemma.
:param lemma:
:return: operands for the lemma | 625941bda79ad161976cc03a |
def update_search_parameters(self, selected_gender, selected_category, selected_subcategory): <NEW_LINE> <INDENT> self.model.set_gender(selected_gender) <NEW_LINE> self.model.set_category(selected_category) <NEW_LINE> self.model.set_subcategory(selected_subcategory) <NEW_LINE> self.model.fetch_results() | Pass parameters selected by user to the model
:return: N/A | 625941bd090684286d50ebd7 |
def __init__(__self__, *, destination_vault_arn: pulumi.Input[str], lifecycle: Optional[pulumi.Input['PlanRuleCopyActionLifecycleArgs']] = None): <NEW_LINE> <INDENT> pulumi.set(__self__, "destination_vault_arn", destination_vault_arn) <NEW_LINE> if lifecycle is not None: <NEW_LINE> <INDENT> pulumi.set(__self__, "lifecy... | :param pulumi.Input[str] destination_vault_arn: An Amazon Resource Name (ARN) that uniquely identifies the destination backup vault for the copied backup.
:param pulumi.Input['PlanRuleCopyActionLifecycleArgs'] lifecycle: The lifecycle defines when a protected resource is copied over to a backup vault and when it expire... | 625941bd57b8e32f5248338e |
def model(X_train, Y_train, X_test, Y_test, num_iterations = 2000, learning_rate = 0.5, print_cost = False): <NEW_LINE> <INDENT> w, b = initialize_with_zeros(num_px * num_px * 3) <NEW_LINE> parameters, grads, costs = optimize(w, b, X_train, Y_train, num_iterations, learning_rate, print_cost = False) <NEW_LINE> w = para... | Builds the logistic regression model by calling the function you've implemented previously
Arguments:
X_train -- training set represented by a numpy array of shape (num_px * num_px * 3, m_train)
Y_train -- training labels represented by a numpy array (vector) of shape (1, m_train)
X_test -- test set represented by a n... | 625941bd5fcc89381b1e15b2 |
def __init__(self, root_directory, sha1_chunk=10): <NEW_LINE> <INDENT> self._root_dir = os.path.abspath(os.path.expanduser(root_directory)) <NEW_LINE> self._sha1_chunk = sha1_chunk <NEW_LINE> self._log.debug("Initializing FileSet under root dir: %s", self._root_dir) <NEW_LINE> self._element_map = {} <NEW_LINE> self._el... | Initialize a new or existing file set from a root directory.
:param root_directory: Directory that this file set is based in. For
relative path resolution, see the ``work_relative`` parameter
description.
:type root_directory: str
:param sha1_chunk: Number of segments to split data element SHA1 sum
into w... | 625941bd2eb69b55b151c7a1 |
def get_activations( self, x: np.ndarray, layer: Union[int, str], batch_size: int = 128, framework: bool = False ) -> np.ndarray: <NEW_LINE> <INDENT> raise NotImplementedError | Return the output of the specified layer for input `x`. `layer` is specified by layer index (between 0 and
`nb_layers - 1`) or by name. The number of layers can be determined by counting the results returned by
calling `layer_names`.
:param x: Input for computing the activations.
:param layer: Layer for computing the ... | 625941bd7c178a314d6ef34f |
def get_reddit_data(subreddit, date): <NEW_LINE> <INDENT> titles = [] <NEW_LINE> url = "https://web.archive.org/web/" + date + "/reddit.com/" + subreddit <NEW_LINE> print(url) <NEW_LINE> driver.get(url) <NEW_LINE> try: <NEW_LINE> <INDENT> sitetable = driver.find_element_by_id("siteTable") <NEW_LINE> posts = sitetable.f... | Gets top 26 frontpage titles from 'subreddit' on 'date
:param subreddit: ex: 'r/bitcoin'
:param date: in 'YYYYMMDD'
:return titles: a list of strings of titles | 625941bd956e5f7376d70d64 |