Search is not available for this dataset
text
stringlengths
75
104k
def lte(max_value):
    """Validates that a field value is less than or equal to the value
    given to this validator."""
    def validate(value):
        if value <= max_value:
            return None
        return e("{} is not less than or equal to {}", value, max_value)
    return validate
def gt(gt_value):
    """Validates that a field value is greater than the value given to
    this validator."""
    def validate(value):
        if value > gt_value:
            return None
        return e("{} is not greater than {}", value, gt_value)
    return validate
def lt(lt_value):
    """Validates that a field value is less than the value given to this
    validator."""
    def validate(value):
        if value < lt_value:
            return None
        return e("{} is not less than {}", value, lt_value)
    return validate
def between(min_value, max_value): """ Validates that a field value is between the two values given to this validator. """ def validate(value): if value < min_value: return e("{} is not greater than or equal to {}", value, min_value) if value > max_value: ...
def length(min=None, max=None): """ Validates that a field value's length is between the bounds given to this validator. """ def validate(value): if min and len(value) < min: return e("{} does not have a length of at least {}", value, min) if max and len(value) > max: ...
def match(pattern):
    """Validates that a field value matches the regex given to this
    validator."""
    # Compile once at validator-construction time, not per validation.
    regex = re.compile(pattern)
    def validate(value):
        if regex.match(value):
            return None
        return e("{} does not match the pattern {}", value, pattern)
    return validate
def is_email(): """ Validates that a fields value is a valid email address. """ email = ( ur'(?!^\.)' # No dot at start ur'(?!.*\.@)' # No dot before at sign ur'(?!.*@\.)' # No dot after at sign ur'(?!.*\.$)' # No dot at the end ur'(?!.*\.\.)' # No dou...
def is_url(): """ Validates that a fields value is a valid URL. """ # Stolen from Django regex = re.compile( r'^(?:http|ftp)s?://' # http:// or https:// r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain... r'localhost|' #localhost......
def each_item(*validators): """ A wrapper which applies the given validators to each item in a field value of type `list`. Example usage in a Schema: "my_list_field": {"type": Array(int), "validates": each_item(lte(10))} """ def validate(value): for item in value: for v...
def distinct(): """ Validates that all items in the given field list value are distinct, i.e. that the list contains no duplicates. """ def validate(value): for i, item in enumerate(value): if item in value[i+1:]: return e("{} is not a distinct set of values", val...
def apply_defaults(self, instance): """Applies the defaults described by the this schema to the given document instance as appropriate. Defaults are only applied to fields which are currently unset.""" for field, spec in self.doc_spec.iteritems(): field_type = spec['type'] ...
def validate(self, instance):
    """Validates the given document against this schema.

    Raises a ValidationException if there are any failures.
    """
    errors = {}
    self._validate_instance(instance, errors)
    # Any collected error means the instance is invalid.
    if errors:
        raise ValidationException(errors)
def _verify(self, path_prefix=None): """Verifies that this schema's doc spec is valid and makes sense.""" for field, spec in self.doc_spec.iteritems(): path = self._append_path(path_prefix, field) # Standard dict-based spec if isinstance(spec, dict): ...
def _verify_field_spec(self, spec, path): """Verifies a given field specification is valid, recursing into nested schemas if required.""" # Required should be a boolean if 'required' in spec and not isinstance(spec['required'], bool): raise SchemaFormatException("{} required declara...
def _verify_type(self, spec, path): """Verify that the 'type' in the spec is valid""" field_type = spec['type'] if isinstance(field_type, Schema): # Nested documents cannot have validation if not set(spec.keys()).issubset(set(['type', 'required', 'nullable', 'default']))...
def _verify_default(self, spec, path): """Verifies that the default specified in the given spec is valid.""" field_type = spec['type'] default = spec['default'] # If it's a function there's nothing we can really do except assume its valid if callable(default): return...
def _verify_validates(self, spec, path): """Verify thats the 'validates' argument is valid.""" validates = spec['validates'] if isinstance(validates, list): for validator in validates: self._verify_validator(validator, path) else: self._verify_val...
def _verify_validator(self, validator, path): """Verifies that a given validator associated with the field at the given path is legitimate.""" # Validator should be a function if not callable(validator): raise SchemaFormatException("Invalid validations for {}", path) # Vali...
def _validate_instance(self, instance, errors, path_prefix=''): """Validates that the given instance of a document conforms to the given schema's structure and validations. Any validation errors are added to the given errors collection. The caller should assume the instance is considered valid i...
def _validate_value(self, value, field_spec, path, errors): """Validates that the given field value is valid given the associated field spec and path. Any validation failures are added to the given errors collection.""" # Check if the value is None and add an error if the field is not n...
def load_config(path=None, defaults=None): """ Loads and parses an INI style configuration file using Python's built-in configparser module. If path is specified, load it. If ``defaults`` (a list of strings) is given, try to load each entry as a file, without throwing any error if the operation fail...
def as_dict(config): """ Converts a ConfigParser object into a dictionary. The resulting dictionary has sections as keys which point to a dict of the sections options as key => value pairs. """ settings = defaultdict(lambda: {}) for section in config.sections(): for key, val in conf...
def initialize(self, timeouts): """ Bind or connect the nanomsg socket to some address """ # Bind or connect to address if self.bind is True: self.socket.bind(self.address) else: self.socket.connect(self.address) # Set send and recv timeouts self...
def _set_timeouts(self, timeouts): """ Set socket timeouts for send and receive respectively """ (send_timeout, recv_timeout) = (None, None) try: (send_timeout, recv_timeout) = timeouts except TypeError: raise EndpointError( '`timeouts` must be a...
def send(self, payload):
    """Encode, optionally sign, then send the payload through the socket."""
    self.socket.send(self.sign(self.encode(payload)))
def receive(self, decode=True):
    """Receive from socket, authenticate and optionally decode the payload."""
    payload = self.verify(self.socket.recv())
    return self.decode(payload) if decode else payload
def sign(self, payload):
    """Sign payload using the supplied authenticator, if any."""
    if not self.authenticator:
        return payload
    return self.authenticator.signed(payload)
def verify(self, payload): """ Verify payload authenticity via the supplied authenticator """ if not self.authenticator: return payload try: self.authenticator.auth(payload) return self.authenticator.unsigned(payload) except AuthenticatorInvalidSignatu...
def decode(self, payload):
    """Decode payload; any decoder failure is re-raised as DecodeError."""
    try:
        decoded = self.encoder.decode(payload)
    except Exception as exception:
        raise DecodeError(str(exception))
    return decoded
def encode(self, payload):
    """Encode payload; any encoder failure is re-raised as EncodeError."""
    try:
        encoded = self.encoder.encode(payload)
    except Exception as exception:
        raise EncodeError(str(exception))
    return encoded
def start(self):
    """Start the endpoint and serve incoming calls forever."""
    # signal handlers may only be installed from the main thread.
    in_main_thread = threading.current_thread().name == 'MainThread'
    if in_main_thread:
        signal.signal(signal.SIGINT, self.stop)
    logging.info('Started on {}'.format(self.address))
    while True:
        self.process()
def stop(self, dummy_signum=None, dummy_frame=None):
    """ Shutdown process (this method is also a signal handler) """
    # dummy_signum/dummy_frame exist so this method matches the
    # (signum, frame) signature expected by signal.signal; they are unused.
    logging.info('Shutting down ...')
    self.socket.close()
    # Terminate the whole process; SystemExit propagates from here.
    sys.exit(0)
def get_summary(list_all=[], **kwargs): ''' summarize the report data @param list_all: a list which save the report data @param kwargs: such as show_all: True/False report show all status cases proj_name: project name home_pag...
def add_report_data(list_all=[], module_name="TestModule", **kwargs): ''' add report data to a list @param list_all: a list which save the report data @param module_name: test set name or test module name @param kwargs: such as case_name: testcase name ...
def parse(self, subscription): """ Fetch the function registered for a certain subscription """ for name in self.methods: tag = bytes(name.encode('utf-8')) if subscription.startswith(tag): fun = self.methods.get(name) message = subscription[len(ta...
def subscribe(self, tag, fun, description=None):
    """ Subscribe to something and register a function """
    # Map the tag to its handler and optional human-readable description.
    self.methods[tag] = fun
    self.descriptions[tag] = description
    # Tell the underlying nanomsg SUB socket to start accepting messages
    # whose payload begins with this tag.
    self.socket.set_string_option(nanomsg.SUB, nanomsg.SUB_SUBSCRIBE, tag)
def process(self): """ Receive a subscription from the socket and process it """ subscription = None result = None try: subscription = self.socket.recv() except AuthenticateError as exception: logging.error( 'Subscriber error while authe...
def build_payload(self, tag, message):
    """Encode and optionally sign the message, then prefix the subscription tag."""
    body = self.sign(self.encode(message))
    return bytes(tag.encode('utf-8')) + body
def publish(self, tag, message):
    """Publish a message down the socket."""
    self.socket.send(self.build_payload(tag, message))
def start_service(addr, n, authenticator): """ Start a service """ s = Subscriber(addr, authenticator=authenticator) def do_something(line): pass s.subscribe('test', do_something) started = time.time() for _ in range(n): s.process() s.socket.close() duration = time.ti...
def bench(client, n): """ Benchmark n requests """ items = list(range(n)) # Time client publish operations # ------------------------------ started = time.time() for i in items: client.publish('test', i) duration = time.time() - started print('Publisher client stats:') util...
def get_webpack(request, name='DEFAULT'): """ Get the Webpack object for a given webpack config. Called at most once per request per config name. """ if not hasattr(request, '_webpack_map'): request._webpack_map = {} wp = request._webpack_map.get(name) if wp is None: wp = re...
def includeme(config): """ Add pyramid_webpack methods and config to the app """ settings = config.registry.settings root_package_name = config.root_package.__name__ config.registry.webpack = { 'DEFAULT': WebpackState(settings, root_package_name) } for extra_config in aslist(settings.get...
def _get_setting(self, setting, default=None, name=None, inherit=True): """ Helper function to fetch settings, inheriting from the base """ if name is None: name = self.name if name == 'DEFAULT': return self._settings.get('webpack.{0}'.format(setting), default) el...
def load_stats(self, cache=None, wait=None): """ Load and cache the webpack-stats file """ if cache is None: cache = not self.debug if wait is None: wait = self.debug if not cache or self._stats is None: self._stats = self._load_stats() sta...
def _load_stats(self): """ Load the webpack-stats file """ for attempt in range(0, 3): try: with self.stats_file.open() as f: return json.load(f) except ValueError: # If we failed to parse the JSON, it's possible that the ...
def _chunk_filter(self, extensions): """ Create a filter from the extensions and ignore files """ if isinstance(extensions, six.string_types): extensions = extensions.split() def _filter(chunk): """ Exclusion filter """ name = chunk['name'] if ext...
def _add_url(self, chunk): """ Add a 'url' property to a chunk and return it """ if 'url' in chunk: return chunk public_path = chunk.get('publicPath') if public_path: chunk['url'] = public_path else: fullpath = posixpath.join(self.state.static_...
def get_bundle(self, bundle_name, extensions=None): """ Get all the chunks contained in a bundle """ if self.stats.get('status') == 'done': bundle = self.stats.get('chunks', {}).get(bundle_name, None) if bundle is None: raise KeyError('No such bundle {0!r}.'.forma...
def _unique_names(): """Generates unique sequences of bytes. """ characters = ("abcdefghijklmnopqrstuvwxyz" "0123456789") characters = [characters[i:i + 1] for i in irange(len(characters))] rng = random.Random() while True: letters = [rng.choice(characters) for i in ira...
def escape_queue(s): """Escapes the path to a queue, e.g. preserves ~ at the begining. """ if isinstance(s, PosixPath): s = unicode_(s) elif isinstance(s, bytes): s = s.decode('utf-8') if s.startswith('~/'): return '~/' + shell_escape(s[2:]) else: return shell_esc...
def parse_ssh_destination(destination): """Parses the SSH destination argument. """ match = _re_ssh.match(destination) if not match: raise InvalidDestination("Invalid destination: %s" % destination) user, password, host, port = match.groups() info = {} if user: info['username...
def _ssh_client(self):
    """Build a paramiko SSH client that rejects unknown host keys."""
    client = paramiko.SSHClient()
    client.load_system_host_keys()
    # Refuse to connect to hosts whose key is not already known.
    client.set_missing_host_key_policy(paramiko.RejectPolicy())
    return client
def _connect(self): """Connects via SSH. """ ssh = self._ssh_client() logger.debug("Connecting with %s", ', '.join('%s=%r' % (k, v if k != "password" else "***") for k, v in iteritems(self.destination))) ssh.connect(**self.desti...
def get_client(self): """Gets the SSH client. This will check that the connection is still alive first, and reconnect if necessary. """ if self._ssh is None: self._connect() return self._ssh else: try: chan = self._ssh....
def _call(self, cmd, get_output): """Calls a command through the SSH connection. Remote stderr gets printed to this program's stderr. Output is captured and may be returned. """ server_err = self.server_logger() chan = self.get_client().get_transport().open_session() ...
def check_call(self, cmd):
    """Run *cmd* through SSH, raising RemoteCommandFailure on non-zero exit."""
    status, _ = self._call(cmd, False)
    if status != 0:  # pragma: no cover
        raise RemoteCommandFailure(command=cmd, ret=status)
def check_output(self, cmd):
    """Run *cmd* through SSH and return its captured output."""
    status, output = self._call(cmd, True)
    if status != 0:  # pragma: no cover
        raise RemoteCommandFailure(command=cmd, ret=status)
    logger.debug("Output: %r", output)
    return output
def _resolve_queue(self, queue, depth=0, links=None): """Finds the location of tej's queue directory on the server. The `queue` set when constructing this `RemoteQueue` might be relative to the home directory and might contain ``~user`` placeholders. Also, each queue may in fact be a li...
def _get_queue(self): """Gets the actual location of the queue, or None. """ if self._queue is None: self._links = [] queue, depth = self._resolve_queue(self.queue, links=self._links) if queue is None and depth > 0: raise QueueLinkBroken ...
def setup(self, links=None, force=False, only_links=False): """Installs the runtime at the target location. This will not replace an existing installation, unless `force` is True. After installation, creates links to this installation at the specified locations. """ if ...
def _setup(self): """Actually installs the runtime. """ # Expands ~user in queue if self.queue.path[0:1] == b'/': queue = self.queue else: if self.queue.path[0:1] == b'~': output = self.check_output('echo %s' % ...
def submit(self, job_id, directory, script=None): """Submits a job to the queue. If the runtime is not there, it will be installed. If it is a broken chain of links, error. """ if job_id is None: job_id = '%s_%s_%s' % (Path(directory).unicodename, ...
def status(self, job_id): """Gets the status of a previously-submitted job. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / ...
def download(self, job_id, files, **kwargs): """Downloads files from server. """ check_jobid(job_id) if not files: return if isinstance(files, string_types): files = [files] directory = False recursive = kwargs.pop('recursive', True) ...
def kill(self, job_id): """Kills a job on the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/kill'), ...
def list(self): """Lists the jobs on the server. """ queue = self._get_queue() if queue is None: raise QueueDoesntExist output = self.check_output('%s' % shell_escape(queue / 'commands/list')) job_id, info = None, None ...
def multi_substitution(*substitutions): """ Take a sequence of pairs specifying substitutions, and create a function that performs those substitutions. >>> multi_substitution(('foo', 'bar'), ('bar', 'baz'))('foo') 'baz' """ substitutions = itertools.starmap(substitution, substitutions) # compose function appli...
def simple_html_strip(s): r""" Remove HTML from the string `s`. >>> str(simple_html_strip('')) '' >>> print(simple_html_strip('A <bold>stormy</bold> day in paradise')) A stormy day in paradise >>> print(simple_html_strip('Somebody <!-- do not --> tell the truth.')) Somebody tell the truth. >>> print(simpl...
def remove_prefix(text, prefix):
    """
    Remove the prefix from the text if it exists.

    >>> remove_prefix('underwhelming performance', 'underwhelming ')
    'performance'
    >>> remove_prefix('something special', 'sample')
    'something special'
    """
    # The old rpartition-based implementation stripped everything up to the
    # LAST occurrence of `prefix` anywhere in the string, so a non-prefix
    # occurrence (e.g. ('xx-foo', 'foo')) wrongly collapsed the result.
    # Only strip when the text actually starts with the prefix.
    if prefix and text.startswith(prefix):
        return text[len(prefix):]
    return text
def remove_suffix(text, suffix):
    """
    Remove the suffix from the text if it exists.

    >>> remove_suffix('name.git', '.git')
    'name'
    >>> remove_suffix('something special', 'sample')
    'something special'
    """
    # The old partition-based implementation split at the FIRST occurrence
    # of `suffix`, so ('a.git.git', '.git') returned 'a' and a non-suffix
    # occurrence (e.g. ('gitx', 'git')) returned ''. Only strip when the
    # text actually ends with the suffix.
    if suffix and text.endswith(suffix):
        return text[:-len(suffix)]
    return text
def common_prefix(s1, s2):
    """
    Return the common prefix of two lines.
    """
    # Compare characters directly instead of re-slicing both strings on
    # every iteration (the old loop was O(n^2) in the prefix length).
    limit = min(len(s1), len(s2))
    index = 0
    while index < limit and s1[index] == s2[index]:
        index += 1
    return s1[:index]
def _get_graph(self, ctx, bundle, extensions, caller=None): """ Run a graph and render the tag contents for each output """ request = ctx.get('request') if request is None: request = get_current_request() if ':' in bundle: config_name, bundle = bundle.split(':') ...
def activate(lancet, method, project): """Switch to this project.""" with taskstatus("Looking up project") as ts: if method == "key": func = get_project_keys elif method == "dir": func = get_project_keys for key, project_path in func(lancet): if key.l...
def workon(ctx, issue_id, new, base_branch): """ Start work on a given issue. This command retrieves the issue from the issue tracker, creates and checks out a new aptly-named branch, puts the issue in the configured active, status, assigns it to you and starts a correctly linked Harvest timer. ...
def time(lancet, issue): """ Start an Harvest timer for the given issue. This command takes care of linking the timer with the issue tracker page for the given issue. If the issue is not passed to command it's taken from currently active branch. """ issue = get_issue(lancet, issue) wit...
def pause(ctx): """ Pause work on the current issue. This command puts the issue in the configured paused status and stops the current Harvest timer. """ lancet = ctx.obj paused_status = lancet.config.get("tracker", "paused_status") # Get the issue issue = get_issue(lancet) # ...
def resume(ctx): """ Resume work on the currently active issue. The issue is retrieved from the currently active branch name. """ lancet = ctx.obj username = lancet.tracker.whoami() active_status = lancet.config.get("tracker", "active_status") # Get the issue issue = get_issue(lan...
def ssh(lancet, print_cmd, environment): """ SSH into the given environment, based on the dploi configuration. """ namespace = {} with open(lancet.config.get('dploi', 'deployment_spec')) as fh: code = compile(fh.read(), 'deployment.py', 'exec') exec(code, {}, namespace) config ...
def _setup_helper():
    """Print the shell integration code."""
    # helper.sh lives alongside this module.
    helper = os.path.join(os.path.dirname(os.path.abspath(__file__)), "helper.sh")
    with open(helper) as fh:
        click.echo(fh.read())
def _commands(ctx): """Prints a list of commands for shell completion hooks.""" ctx = ctx.parent ctx.show_hidden_subcommands = False main = ctx.command for subcommand in main.list_commands(ctx): cmd = main.get_command(ctx, subcommand) if cmd is None: continue hel...
def _arguments(ctx, command_name=None): """Prints a list of arguments for shell completion hooks. If a command name is given, returns the arguments for that subcommand. The command name has to refer to a command; aliases are not supported. """ ctx = ctx.parent main = ctx.command if command_...
def _autocomplete(ctx, shell): """Print the shell autocompletion code.""" if not shell: shell = os.environ.get("SHELL", "") shell = os.path.basename(shell).lower() if not shell: click.secho( "Your shell could not be detected, please pass its name " "as the arg...
def raisefrom(exc_type, message, exc): # type: (Any, str, BaseException) -> None """Call Python 3 raise from or emulate it for Python 2 Args: exc_type (Any): Type of Exception message (str): Error message to display exc (BaseException): original exception Returns: None ...
def init_runner(self, parser, tracers, projinfo): ''' initial some instances for preparing to run test case @note: should not override @param parser: instance of TestCaseParser @param tracers: dict type for the instance of Tracer. Such as {"":tracer_obj} or {"192.168.0.1:5555":trace...
def _run_grid_multiprocess(self, func, iterables): ''' running case with mutil process to support selenium grid-mode(multiple web) and appium grid-mode(multiple devices). @param func: function object @param iterables: iterable objects ''' multiprocessing.freeze_support() ...
def _run_grid_multithread(self, func, iterables): ''' running case with mutil process to support selenium grid-mode(multiple web) and appium grid-mode(multiple devices). @param func: function object @param iterables: iterable objects ''' f = lambda x: threading....
def init_project_env(subject='Automation', proj_path = None, sysencoding = "utf-8", debug = False): ''' Set the environment for pyrunner ''' # if sysencoding: # set_sys_encode(sysencoding) if not proj_path: try: executable_file_path = os.path.dirname(os....
def seqfy(strs): ''' 序列化 字符串--->实际效果是,为字符串,添加行号,返回字符串 Sampe usage: strs = ["", None, u"First-line\nSecond-line\nThird-line", u"没有换行符"] for s in strs: print "---" result = seqfy(s) print result print unseqfy(result) ''' if no...
def stepfy(strs): ''' 步骤化 字符串 --->实际效果是, 依据 序列化的字符串,转换为 Step_%s_info 的字典, 返回字典 Sample usage: test_strs = [ "", None, u"First-line\nSecond-line\nThird-line", u'1.First-line\n2.Second-line\n3.Third-line\n', u'3.没有换行符', u'3.有换行符\n', "asdfasd...
def map_function(func_str, fw_action_addtion=None,bw_action_addtion=None, alias_func=None): ''' Sample usage: print map_function('set',alias_func = "ini_items");# -> ini_items print map_function('set',fw_action_addtion="action_steps_",bw_action_addtion="_for_upd",alias_func = "ini_items"); # -> a...
def until_cmd(listcmd, end_expects=None, save2logfile=None, coding = encoding): ''' 执行系统命令,并等待执行完 @param listcmd: 执行的命令,列表格式 @param end_expects: 命令执行结束,在输出的最后一行,正则搜素期望值,并设置 结果标志 @param save2logfile: 设置执行过程,保存的日志 @param coding: 设置输出编码 ''' ...
def until(method, timeout = 30, message=''): """Calls the method until the return value is not False.""" end_time = time.time() + timeout while True: try: value = method() if value: return value except: ...
def _check_format(file_path, content): """ check testcase format if valid """ if not content: # testcase file content is empty err_msg = u"Testcase file content is empty: {}".format(file_path) raise p_exception.FileFormatError(err_msg) elif no...
def _load_yaml_file(yaml_file):
    """ load yaml file and check file content format

    @param yaml_file: path to the YAML testcase file
    @return: parsed YAML content after format validation
    """
    with io.open(yaml_file, 'r', encoding='utf-8') as stream:
        # yaml.load without an explicit Loader can construct arbitrary
        # Python objects from tagged input; safe_load restricts parsing
        # to plain YAML types and is the recommended replacement.
        yaml_content = yaml.safe_load(stream)
        FileUtils._check_format(yaml_file, yaml_content)
        return yaml_content
def _load_json_file(json_file): """ load json file and check file content format """ with io.open(json_file, encoding='utf-8') as data_file: try: json_content = json.load(data_file) except p_exception.JSONDecodeError: err_msg = u"JSO...
def _load_csv_file(csv_file): """ load csv file and check file content format @param csv_file: csv file path e.g. csv file content: username,password test1,111111 test2,222222 test3,333333 @return ...
def force_delete_file(file_path): ''' force delete a file ''' if os.path.isfile(file_path): try: os.remove(file_path) return file_path except: return FileSystemUtils.add_unique_postfix(file_path) else: ...
def mkzip(source_dir, output_filename): '''Usage: p = r'D:\auto\env\ttest\ins\build\lib\rock4\softtest\support' mkzip(os.path.join(p, "appiumroot"),os.path.join(p, "appiumroot.zip")) unzip(os.path.join(p, "appiumroot.zip"),os.path.join(p, "appiumroot2")) ''' ...
def get_imported_module_from_file(file_path): """ import module from python file path and return imported module """ if p_compat.is_py3: imported_module = importlib.machinery.SourceFileLoader('module_name', file_path).load_module() elif p_compat.is_py2: impo...